[
  {
    "path": ".dockerignore",
    "content": ".git\n.gitignore\nLICENSE\n*.zip\n*.swp\nexperiments\nmhc_ligand_full*\ntraining/class1_allele_specific/data/mhc_ligand_full.*\n"
  },
  {
    "path": ".github/workflows/build.yml",
    "content": "name: build\n\non:\n  workflow_dispatch: {}\n  workflow_call: {}\n\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v3\n      - name: Set up Python\n        uses: actions/setup-python@v3\n        with:\n          python-version: 3.11\n      - name: Install dependencies\n        run: |\n          python -m pip install --upgrade pip\n          pip install build\n      - name: Build package\n        run: |\n          python -m build -sw\n          ls -lh dist/\n      - name: Upload artifact\n        uses: actions/upload-artifact@v4\n        with:\n          name: dist\n          path: dist/\n"
  },
  {
    "path": ".github/workflows/ci.yml",
    "content": "name: CI\n\non:\n  push:\n    branches: [\"master\"]\n  pull_request:\n    branches: [\"master\"]\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n    defaults:\n      run:\n        shell: bash -el {0}\n    strategy:\n      fail-fast: false\n      matrix:\n        python-version: [\"3.10\", \"3.11\", \"3.12\"]\n\n    steps:\n      - name: Checkout code\n        uses: actions/checkout@v3\n\n      - name: Set up Python ${{ matrix.python-version }}\n        uses: actions/setup-python@v3\n        with:\n          python-version: ${{ matrix.python-version }}\n\n      - name: Install system dependencies\n        run: |\n          sudo apt-get update\n          sudo apt-get install -y pandoc\n      - name: Build Conda environment\n        uses: conda-incubator/setup-miniconda@v2\n        with:\n          activate-environment: test-environment\n          environment-file: test-environment.yml\n          python-version: ${{ matrix.python-version }}\n          auto-activate-base: false\n\n      - name: Install python dependencies\n        run: |\n          pip install --upgrade pip\n          pip install flake8 nose-py3 pytest pytest-cov coveralls\n          pip install -r requirements.txt\n          pip install -r docs/requirements.txt\n          pip install .\n      #- name: Lint with flake8\n      #  run: |\n      #    # stop the build if there are Python syntax errors or undefined names\n      #    flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics\n      #    # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide\n      #    # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics\n\n      - name: Download data and models\n        run: |\n          mhcflurry-downloads fetch data_curated data_mass_spec_annotated models_class1 models_class1_presentation models_class1_processing models_class1_pan allele_sequences\n\n      - name: Test with pytest\n        run: |\n          pytest --tb=long --cov=mhcflurry/ --cov-report=term-missing ./test\n\n      # - name: Publish coverage to Coveralls\n      #  uses: coverallsapp/github-action@v2.2.3\n      #  with:\n      #    parallel: true\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "# Based on https://docs.pypi.org/trusted-publishers/using-a-publisher/\n\nname: release\n\non:\n  release:\n    types: [published]\n\njobs:\n  build:\n    uses: ./.github/workflows/build.yml\n  pypi-publish:\n    name: upload release to PyPI\n    needs: build\n    runs-on: ubuntu-latest\n    environment: release\n    permissions:\n      id-token: write  # IMPORTANT: mandatory for trusted publishing\n    steps:\n      - name: Download build artifacts\n        uses: actions/download-artifact@v4\n        with:\n          name: dist\n          path: dist\n      - name: Publish package distributions to PyPI\n        uses: pypa/gh-action-pypi-publish@release/v1\n\n"
  },
  {
    "path": ".github/workflows/release_testpypi.yml",
    "content": "# Based on https://docs.pypi.org/trusted-publishers/using-a-publisher/\n\nname: release_testpypi\n\non:\n  workflow_dispatch: {}\n\njobs:\n  build:\n    uses: ./.github/workflows/build.yml\n  publish-to-testpypi:\n    name: upload release to TestPyPI\n    needs: build\n    runs-on: ubuntu-latest\n    environment: release_testpypi\n    permissions:\n      id-token: write  # IMPORTANT: mandatory for trusted publishing\n    steps:\n      - name: Download build artifacts\n        uses: actions/download-artifact@v4\n        with:\n          name: dist\n          path: dist\n      - name: Publish distribution to TestPyPI\n        uses: pypa/gh-action-pypi-publish@release/v1\n        with:\n          repository-url: https://test.pypi.org/legacy/\n"
  },
  {
    "path": ".gitignore",
    "content": "# Swap files\n*.swp\n\n# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nenv/\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\n*.egg-info/\n.installed.cfg\n*.egg\n\n# IDE\n.idea/\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*,cover\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\n\n# Sphinx documentation\ndocs/_build/\ndocs/_static\ndocs/_templates\n\n# PyBuilder\ntarget/\n\n# Data files\n*.zip\n\n# Dask distributed clutter\n.dask-web-ui.json\n\n# ipython checkpoints\n.ipynb_checkpoints\n\n# OS X extra files\n.DS_Store\n"
  },
  {
    "path": "AGENTS.md",
    "content": "# AGENTS.md — mhcflurry\n\nGuide for coding agents working in this repo. Read this before touching code.\n\n---\n\n## Golden Rules\n\n1. **Never commit to `main`.** Always `git checkout -b <feature-branch>` before editing. Land via PR.\n2. **Every PR bumps the version.** Even doc-only PRs — at minimum a patch bump in the package's `__init__.py` / `_version.py`.\n3. **\"Done\" means merged AND released** — never stop at merge. mhcflurry doesn't (yet) have `deploy.sh`; follow the release recipe in `CONTRIBUTING.md` / `NOTES.md` and push the tag so PyPI gets the new version. Skipping release = task not done.\n4. **File problems as issues, don't silently work around them.** If you hit a bug here or in a sibling openvax/pirl-unc repo, open a GitHub issue on the correct repo and link it from the PR.\n5. **After a PR ships, look for the next block of work.** Read open issues across the relevant openvax repos, group by dependency + urgency. Prefer *foundational* changes that unblock multiple downstream improvements; otherwise chain the smallest independent improvements.\n\n---\n\n## Repo Shape (read before scripting)\n\nUnlike its siblings, mhcflurry does **not** have `test.sh`, `deploy.sh`, or `format.sh`. It has:\n\n- `develop.sh` — **source** this (`source develop.sh`) to create/activate `.venv` and editable-install. Do not `./develop.sh` (its venv activation won't persist).\n- `lint.sh` — `ruff check mhcflurry/ test/` (note: tests live in `test/`, singular).\n- `setup.py` — packaging.\n- No `pyproject.toml`.\n\nIf you want to add `test.sh` / `deploy.sh` / `format.sh` to match the other openvax repos, that's a welcome foundational PR — discuss with Alex first.\n\n## Before Completing Any Task\n\nBefore telling the user a change is \"complete\":\n\n1. **`./lint.sh`** — must pass (ruff check)\n2. **Run tests**: `pytest test/` (no `test.sh` wrapper). For the slow ML suite you may need downloaded models — see `docker/` or `test-environment.yml`.\n3. For a PR: **CI must be green on GitHub**, then merge, then release (see Golden Rule 3).\n\n## Code Style\n\n- Python 3.9+\n- Lint: ruff (concise output)\n- Docstrings: numpy style\n- Bugfixes include a regression test where feasible\n- mhcflurry is a trained ML model system — be extremely cautious about changes that could alter predictions without a clear reason. Prediction-affecting changes need empirical validation, not just green tests.\n\n---\n\n## Workflow Orchestration\n\n### 1. Upfront Planning\n- For any non-trivial task (3+ steps or architectural): write a short spec first. If something goes sideways, STOP and re-plan — don't keep pushing.\n\n### 2. Verification Before Done\n- Never claim complete without proof: tests green, CI green, release tagged.\n- For model or training changes: include before/after metrics on a held-out set.\n\n### 3. Autonomous Bug Fixing\n- Given a bug report: just fix it. Point at logs/errors/failing tests and resolve them without hand-holding.\n\n### 4. Demand Elegance (Balanced)\n- For non-trivial changes pause and ask \"is there a more elegant way?\" — skip for trivial fixes.\n- Treat workarounds as bugs, not new abstractions. Rip out legacy paths decisively rather than accumulating special cases.\n\n### 5. 
Issue Triage After Each Ship\n- Close superseded/outdated issues as you notice them.\n- New problems mid-task → file as issues (on the right repo, even if it's not this one), don't bury.\n\n---\n\n## Core Principles\n\n- **Simplicity first.** Minimal diffs, minimal abstractions.\n- **No laziness.** Find root causes; no temporary fixes, no empty-category fudges.\n- **Minimal blast radius.** Touch only what the task requires.\n\n## Scientific Domain Knowledge\n\n- If a change touches immunology/genomics semantics, check primary sources (papers, UniProt, GenBank) before edits.\n- If the code expresses a scientific model at odds with your understanding, flag it — don't silently \"fix\" it into something wrong.\n- Use `mhcgnomes` for MHC allele parsing. Never `startswith(\"HLA-\")` or other string hacks — alleles aren't always human.\n"
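\nFor the last rule, a minimal sketch (illustrative only; `mhcgnomes.parse` is the single function assumed here — check the `mhcgnomes` docs for the full API):\n\n```python\nimport mhcgnomes\n\n# Parse an allele name robustly instead of string-prefix hacks.\n# Works for non-human alleles as well.\nallele = mhcgnomes.parse(\"HLA-A*02:01\")\nprint(type(allele).__name__, allele)\n```\n"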
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing to MHCflurry\n\nWe would love your help in making MHCflurry a useful resource for the community. No contribution is too small, and we especially appreciate usability improvements like better documentation, tutorials, tests, or code cleanup.\n\n## Project scope\nWe hope MHCflurry will grow to include **reference implementations for state-of-the-art approaches for T cell epitope prediction**. This includes pan-allele MHC I and II prediction and closely related tasks such as prediction of antigen processing and immunogenicity. It does not include tasks such as B cell (antibody) epitope prediction, prediction of TCR/pMHC interactions, or downstream tasks such as cancer vaccine design. All committed code to MHCflurry should be suitable for regular research use by practioners. This likely means that new models will require a benchmark evaluation with a publication or preprint before they can be accepted.\n\nIf you are contemplating a large contribution, such as the addition of a new predictive model, it probably makes sense to reach out on the Github issue tracker (or email us at hello@openvax.org) to discuss and coordinate the work.\n\n## Making a contribution\nAll contributions can be made as pull requests on Github. One of the core developers will review your contribution. As needed the core contributors will also make releases and submit to PyPI.\n\nA few other guidelines:\n\n * Any generated resource, such as trained models, must be associated with a `GENERATE.sh` script in [downloads-generation](https://github.com/openvax/mhcflurry/tree/master/downloads-generation). Running this script with no arguments should fully reproduce the generated result. Reproducability of MHCflurry trained models and related data (such as curated training data, allele sequences, etc.) is key to allowing others to build upon and improve our work.\n * MHCflurry supports Python 3.10+ on Linux and OS X. We can't guarantee support for Windows. If you are having trouble running MHCflurry on Windows we would appreciate contributions that help us address this.\n * All functions should be documented using [numpy-style docstrings](https://numpydoc.readthedocs.io/en/latest/format.html) and associated with unit tests.\n * Bugfixes should be accompanied with test that illustrates the bug when feasible.\n * Contributions are licensed under Apache 2.0\n * Please adhere to our [code of conduct](https://github.com/openvax/mhcflurry/blob/master/code-of-conduct.md).\n\nWorking on your first Pull Request? One resource that may be helpful is [How to Contribute to an Open Source Project on GitHub](https://egghead.io/series/how-to-contribute-to-an-open-source-project-on-github).\n"
  },
  {
    "path": "Dockerfile",
    "content": "FROM continuumio/miniconda3:latest\n\nLABEL maintainer=\"Tim O'Donnell timodonnell@gmail.com\"\n\nWORKDIR /root\n\n# Install system dependencies\nRUN apt-get update -y && apt-get install -y gcc && \\\n    apt-get clean && rm -rf /var/lib/apt/lists/*\n\n# Create a lightweight conda env with Python 3.10\nRUN conda create -n mhcflurry python=3.10 -y && \\\n    conda clean -afy\n\n# Activate the env by modifying PATH\nENV PATH /opt/conda/envs/mhcflurry/bin:$PATH\n\n# Install pip packages in the env\nRUN pip install --no-cache-dir --upgrade pip && \\\n    pip install --no-cache-dir jupyter seaborn\n\n# Install dependencies (doing this first to have them cached)\nCOPY requirements.txt /tmp/mhcflurry-requirements.txt\nRUN pip install --no-cache-dir -r /tmp/mhcflurry-requirements.txt\n\n# Pre-download resources for mhcflurry\nRUN mkdir /tmp/mhcflurry-downloads\nCOPY mhcflurry/downloads.yml /tmp/mhcflurry-downloads\nRUN python -c '\\\nimport yaml, subprocess; \\\nd = yaml.safe_load(open(\"/tmp/mhcflurry-downloads/downloads.yml\")); \\\ndownloads = d[\"releases\"][d[\"current-release\"]][\"downloads\"]; \\\nurls = [item[\"url\"] for item in downloads if item[\"default\"]]; \\\n[subprocess.run([\"wget\", \"-P\", \"/tmp/mhcflurry-downloads\", url]) for url in urls]'\n\n# Copy example notebooks\nCOPY notebooks/* ./\n\n# Copy source code and install mhcflurry in editable mode\nCOPY . mhcflurry\nRUN pip install -e mhcflurry/\n\n# Fetch resources from pre-downloaded data\nRUN mhcflurry-downloads fetch --already-downloaded-dir /tmp/mhcflurry-downloads\n\nEXPOSE 9999\nCMD [\"jupyter\", \"notebook\", \"--port=9999\", \"--no-browser\", \"--ip=0.0.0.0\", \"--allow-root\", \"--NotebookApp.token=''\", \"--NotebookApp.password=''\"]\n\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. 
For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. 
Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"{}\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright {yyyy} {name of copyright owner}\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n\n"
  },
  {
    "path": "NOTES.md",
    "content": "# Notes\n\n## 2026-02-10\n\n- Goal: match PyTorch branch behavior to TensorFlow master for class I presentation prediction.\n- Confirmed mismatch is isolated to `processing_predictor_with_flanks` path:\n  - Affinity outputs match TF nearly exactly.\n  - Processing without flanks matches TF nearly exactly.\n  - Processing with flanks differs materially.\n- Intermediate feature comparison (single processing model) shows:\n  - `n_flank_cleaved`, `n_flank_internal_cleaved`, `c_flank_cleaved`, `c_flank_internal_cleaved` match TF.\n  - Only `n_flank_avg_dense` and `c_flank_avg_dense` inputs differ.\n- Root cause identified:\n  - TF computes masked flank averages with `reduce_mean(..., axis=1)` over full sequence length.\n  - Current PyTorch computes average over flank positions only.\n  - This changes the two flank-average scalar features and can change top peptide ranking in presentation mode.\n- Fix implemented:\n  - Updated `Class1ProcessingModel` N/C flank-average pooling math to mirror TF exactly:\n    - `mean((x + 1) * mask, axis=sequence_axis) - 1`\n    - denominator is full sequence length.\n- Validation after fix:\n  - Single-model intermediate features now match TF to float noise.\n  - With-flanks processing predictions now match TF to float noise.\n  - End-to-end presentation predictions for test sequences now match TF best-peptide selection.\n  - `test/test_class1_presentation_predictor.py::test_downloaded_predictor` passes.\n  - Parity test subset passes:\n    - `test/test_master_compat_predictions.py`\n    - `test/test_released_master_predictions.py`\n    - `test/test_pytorch_regressions.py`\n- Regression coverage:\n  - Added `test_processing_flank_averages_use_tf_masked_mean_semantics` in\n    `test/test_pytorch_regressions.py`.\n- Tooling add-on:\n  - Added `scripts/modal_train_mhcflurry.py` for running parallel training jobs on Modal.\n- Random TF-vs-PyTorch comparison harness improvements:\n  - Added curated default allele panel in `scripts/compare_tf_pytorch_random_outputs.py`:\n    - ~30 common HLA alleles plus a few animal alleles (`--allele-panel iedb_plus_animals`).\n  - Reduced duplicate work in backend prediction:\n    - Reused `Class1PresentationPredictor.predict(...)` processing outputs for\n      `processing_with_score` and `processing_without_score` columns.\n    - Removed separate direct processing predictor passes.\n  - Runtime sanity:\n    - Full `run --num-examples 5000` dropped from ~142s to ~80s on this machine.\n\n- Added cross-product parity analysis workflow:\n  - New script: `scripts/cross_allele_parity_analysis.py`\n  - Generates random peptides uniformly across supported lengths (requested 7-15).\n  - Crosses peptides against curated allele panel and predicts PT vs TF.\n  - Produces:\n    - prediction tables\n    - numeric parity summaries\n    - break analysis tables/report\n    - plots under `plots/`\n- Executed full run:\n  - `1000` peptides x `35` alleles = `35000` pMHC rows\n  - lengths: `7..15`\n  - key result: no thresholded break events observed; differences remained at\n    expected floating-point noise scale for score outputs and tiny absolute nM\n    differences for affinity outputs.\n- Follow-up experiment with random flanks:\n  - Updated `scripts/cross_allele_parity_analysis.py` to:\n    - generate random N/C flanks per peptide (length 5/5 from model support),\n    - enforce pre-run uniqueness checks on peptide entries:\n      - no repeated `peptide`, `n_flank`, or `c_flank`,\n      - no duplicate `(peptide, n_flank, c_flank)` 
rows,\n      - no duplicate `(peptide, allele, n_flank, c_flank)` in full dataset,\n    - enforce post-run presentation sanity checks on both PT and TF:\n      - at least 1% rows with score > 0.2,\n      - at least one row with score > 0.9.\n  - Run output dir: `/tmp/mhcflurry-cross-allele-1000-randflanks`\n    - `1000` peptides x `35` alleles = `35000` rows.\n  - Sanity thresholds passed:\n    - PT with-flanks: 1.28% > 0.2, max 0.973\n    - TF with-flanks: 1.28% > 0.2, max 0.973\n    - PT without-flanks: 1.32% > 0.2, max 0.970\n    - TF without-flanks: 1.32% > 0.2, max 0.970\n- High-score fixture extraction for unit tests:\n  - Added `scripts/extract_high_presentation_fixture.py`.\n  - Extracted TF fixture rows from\n    `/tmp/mhcflurry-cross-allele-1000-randflanks/tf_predictions.csv.gz`:\n    - selected peptide+flank contexts where any allele had presentation score > 0.9,\n    - retained all alleles for each selected context (including low scorers),\n    - produced `315` rows (`9` contexts x `35` alleles).\n  - Added fixture files:\n    - `test/data/master_released_class1_presentation_highscore_rows.csv.gz`\n    - `test/data/master_released_class1_presentation_highscore_rows_metadata.json`\n  - Added regression test:\n    - `test/test_released_presentation_highscore_rows.py`\n    - validates fixture high/low context properties and compares released\n      PyTorch predictions against TF fixture outputs.\n\n## 2026-02-12\n\n- Packaging / Torch readiness checks:\n  - Verified `setup.py` publishes `torch>=2.0.0` in metadata and wheel:\n    - `python setup.py egg_info`\n    - `python -m pip wheel --no-deps .`\n  - Verified generated metadata includes `Requires-Dist: torch>=2.0.0`.\n  - Editable install attempt failed in this sandbox due to permissions in the\n    shared virtualenv `bin/` path, not due to packaging metadata.\n\n- Warning triage:\n  - Important forward-compat warnings fixed:\n    - `class1_presentation_predictor.py`: avoid `idxmin` on all-NA rows.\n    - `random_negative_peptides.py`: avoid assigning `NaN` into an int-typed frame.\n  - Test warning cleanup:\n    - `test_class1_processing_neural_network.py`: avoid `SettingWithCopyWarning`\n      by copying train/test subsets before assignment.\n  - Deprecated imports cleanup:\n    - `downloads.py`: replaced `pipes.quote` with `shlex.quote`.\n    - `downloads.py`: replaced `pkg_resources.resource_string` with\n      `importlib.resources.files(...).read_text()`.\n\n- Targeted validation after fixes:\n  - `pytest -q test/test_class1_presentation_predictor.py::test_downloaded_predictor_invalid_peptides`\n  - `pytest -q test/test_random_negative_peptides.py::test_random_negative_peptides_by_allele`\n  - `pytest -q test/test_class1_processing_neural_network.py::test_small`\n  - Result: all pass; only an isolated `pytest.mark.slow` registration warning remains\n    when running that single test file directly.\n
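\n- Reference sketch of the 2026-02-10 flank-average fix (minimal and illustrative; shapes assumed `(batch, seq_len)`, names are not the actual `Class1ProcessingModel` code):\n\n```python\nimport torch\n\ndef flank_average(x, mask):\n    # x: (batch, seq_len) per-position activations.\n    # mask: (batch, seq_len); 1.0 on flank positions, 0.0 elsewhere.\n    # TF semantics: mean over the FULL sequence length,\n    #   mean((x + 1) * mask, axis=1) - 1\n    # not sum(x * mask) / mask.sum() (the old flank-only average).\n    return ((x + 1.0) * mask).mean(dim=1) - 1.0\n```\n"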
  },
  {
    "path": "README.md",
    "content": "[![Build Status](https://github.com/openvax/mhcflurry/actions/workflows/ci.yml/badge.svg)](https://github.com/openvax/mhcflurry/actions/workflows/ci.yml)\n[![Coverage Status](https://coveralls.io/repos/github/openvax/mhcflurry/badge.svg?branch=master)](https://coveralls.io/github/openvax/mhcflurry?branch=master)\n[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openvax/mhcflurry/blob/master/notebooks/mhcflurry-colab.ipynb)\n\n# mhcflurry\n[MHC I](https://en.wikipedia.org/wiki/MHC_class_I) ligand\nprediction package with competitive accuracy and a fast and\n[documented](http://openvax.github.io/mhcflurry/) implementation.\n\n> [!IMPORTANT]\n> **Version 2.2.0** is the first release to use [PyTorch](https://pytorch.org/) as its neural network backend, replacing TensorFlow/Keras used in previous versions. It loads the same published weights and produces equivalent predictions, so existing workflows should continue to work with no changes.\n>\n> Key changes in 2.2.0:\n> - **Backend**: TensorFlow/Keras replaced by PyTorch (>= 2.0)\n> - **Python**: Requires Python 3.10+ (previously 3.9+)\n> - **Dependencies**: `pandas >= 2.0` is now required; `tensorflow` and `keras` are no longer needed\n> - **Hardware**: Automatic GPU detection; Apple Silicon (MPS) is now supported\n>\n> If you are upgrading from 2.1.x, simply `pip install --upgrade mhcflurry`. The published pre-trained models are unchanged and will be loaded and converted automatically.\n\nMHCflurry implements class I peptide/MHC binding affinity prediction.\nThe current version provides pan-MHC I predictors supporting any MHC\nallele of known sequence. MHCflurry runs on Python 3.10+ using the\n[PyTorch](https://pytorch.org/) neural network library.\nIt exposes [command-line](http://openvax.github.io/mhcflurry/commandline_tutorial.html)\nand [Python library](http://openvax.github.io/mhcflurry/python_tutorial.html)\ninterfaces.\n\nMHCflurry also includes two experimental predictors,\nan \"antigen processing\" predictor that attempts to model MHC allele-independent\neffects such as proteosomal cleavage and a \"presentation\" predictor that\nintegrates processing predictions with binding affinity predictions to give a\ncomposite \"presentation score.\" Both models are trained on mass spec-identified\nMHC ligands.\n\nIf you find MHCflurry useful in your research please cite:\n\n> T. O'Donnell, A. Rubinsteyn, U. Laserson. \"MHCflurry 2.0: Improved pan-allele prediction of MHC I-presented peptides by incorporating antigen processing,\" *Cell Systems*, 2020. https://doi.org/10.1016/j.cels.2020.06.010\n\n> T. O'Donnell, A. Rubinsteyn, M. Bonsack, A. B. Riemer, U. Laserson, and J. Hammerbacher, \"MHCflurry: Open-Source Class I MHC Binding Affinity Prediction,\" *Cell Systems*, 2018. https://doi.org/10.1016/j.cels.2018.05.014\n\nPlease file an issue if you have questions or encounter problems.\n\nHave a bugfix or other contribution? We would love your help. 
See our [contributing guidelines](CONTRIBUTING.md).\n\n## Try it now\n\nYou can generate MHCflurry predictions without any setup by running our Google colaboratory [notebook](https://colab.research.google.com/github/openvax/mhcflurry/blob/master/notebooks/mhcflurry-colab.ipynb).\n\n## Installation (pip)\n\nInstall the package:\n\n```\n$ pip install mhcflurry\n```\n\nDownload our datasets and trained models:\n\n```\n$ mhcflurry-downloads fetch\n```\n\nYou can now generate predictions:\n\n```\n$ mhcflurry-predict \\\n       --alleles HLA-A0201 HLA-A0301 \\\n       --peptides SIINFEKL SIINFEKD SIINFEKQ \\\n       --out /tmp/predictions.csv\n\nWrote: /tmp/predictions.csv\n```\n\nOr scan protein sequences for potential epitopes:\n\n```\n$ mhcflurry-predict-scan \\\n        --sequences MFVFLVLLPLVSSQCVNLTTRTQLPPAYTNSFTRGVYYPDKVFRSSVLHS \\\n        --alleles HLA-A*02:01 \\\n        --out /tmp/predictions.csv\n\nWrote: /tmp/predictions.csv\n```\n\n\nSee the [documentation](http://openvax.github.io/mhcflurry/) for more details.\n\n\n## Docker\nYou can also try the latest (GitHub master) version of MHCflurry using the Docker\nimage hosted on [Dockerhub](https://hub.docker.com/r/openvax/mhcflurry) by\nrunning:\n\n```\n$ docker run -p 9999:9999 --rm openvax/mhcflurry:latest\n```\n\nThis will start a [jupyter](https://jupyter.org/) notebook server in an\nenvironment that has MHCflurry installed. Go to `http://localhost:9999` in a\nbrowser to use it.\n\nTo build the Docker image yourself, from a checkout run:\n\n```\n$ docker build -t mhcflurry:latest .\n$ docker run -p 9999:9999 --rm mhcflurry:latest\n```\n## Predicted sequence motifs\nSequence logos for the binding motifs learned by MHCflurry BA are available [here](https://openvax.github.io/mhcflurry-motifs/).\n\n## Common issues and fixes\n\n### Problems downloading data and models\nSome users have reported HTTP connection issues when using `mhcflurry-downloads fetch`. As a workaround, you can download the data manually (e.g. using `wget`) and then use `mhcflurry-downloads` just to copy the data to the right place.\n\nTo do this, first get the URL(s) of the downloads you need using `mhcflurry-downloads url`:\n\n```\n$ mhcflurry-downloads url models_class1_presentation\nhttps://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_presentation.20200205.tar.bz2```\n```\n\nThen make a directory and download the needed files to this directory:\n\n```\n$ mkdir downloads\n$ wget  --directory-prefix downloads https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_presentation.20200205.tar.bz2```\n\nHTTP request sent, awaiting response... 200 OK\nLength: 72616448 (69M) [application/octet-stream]\nSaving to: 'downloads/models_class1_presentation.20200205.tar.bz2'\n```\n\nNow call `mhcflurry-downloads fetch` with the `--already-downloaded-dir` option to indicate that the downloads should be retrived from the specified directory:\n\n```\n$ mhcflurry-downloads fetch models_class1_presentation --already-downloaded-dir downloads\n```\n"
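\n## Python library quick example\n\nA minimal sketch of the Python API (see the [Python tutorial](http://openvax.github.io/mhcflurry/python_tutorial.html) for the full interface; this assumes you have already run `mhcflurry-downloads fetch`):\n\n```python\nfrom mhcflurry import Class1PresentationPredictor\n\n# Load the downloaded presentation models (binding affinity + antigen processing).\npredictor = Class1PresentationPredictor.load()\n\n# Predict a few peptides against a set of alleles; returns a pandas\n# DataFrame with affinity (nM), processing, and presentation columns.\ndf = predictor.predict(\n    peptides=[\"SIINFEKL\", \"SIINFEKD\"],\n    alleles=[\"HLA-A0201\", \"HLA-A0301\"],\n)\nprint(df)\n```\n"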
  },
  {
    "path": "TODO.md",
    "content": "# TODO\n\n## DONE\n\n- [x] Run broader/full test suite before merge.\n  - `pytest -q` passed: 100 tests.\n\n- [x] Localize parity mismatch component.\n  - Affinity and processing-without-flanks parity confirmed.\n  - Processing-with-flanks identified as source of presentation divergence.\n\n- [x] Create development tracking docs.\n  - Added `NOTES.md` and `TODO.md`.\n\n- [x] Fix with-flanks processing parity vs TF in `mhcflurry/class1_processing_neural_network.py`.\n  - Changed N/C flank-average pooling to match TF masked `reduce_mean` semantics.\n  - Verified by comparing intermediate feature vectors and outputs against TF.\n\n- [x] Validate end-to-end parity after fix.\n  - Targeted TF-vs-PyTorch comparisons now match to near float precision.\n  - `test/test_class1_presentation_predictor.py::test_downloaded_predictor` now passes.\n  - Parity-focused tests pass:\n    - `test/test_master_compat_predictions.py`\n    - `test/test_released_master_predictions.py`\n    - `test/test_pytorch_regressions.py`\n\n- [x] Add regression coverage for with-flanks average behavior.\n  - Added `test_processing_flank_averages_use_tf_masked_mean_semantics`.\n\n- [x] Add Modal training script for larger jobs.\n  - Added `scripts/modal_train_mhcflurry.py` with:\n    - GPU worker function\n    - shared artifacts volume\n    - command-template based parallel launch\n\n- [x] Speed up TF-vs-PyTorch random comparison harness.\n  - Added curated default allele panel (`iedb_plus_animals`) to reduce per-run\n    affinity-group fragmentation.\n  - Removed redundant direct processing passes in `predict-backend`; processing\n    outputs now reused from presentation predictions.\n  - Verified end-to-end run succeeds with expected parity metrics and faster runtime.\n\n- [x] Add cross-product parity analysis + plots for fixed peptide panel across alleles.\n  - Added `scripts/cross_allele_parity_analysis.py`.\n  - Ran `1000` random peptides (uniform lengths `7-15`) across curated panel (`35` alleles).\n  - Generated summaries and plots in `/tmp/mhcflurry-cross-allele-1000-panel`.\n\n- [x] Extend cross-product analysis to random flanks + strict sanity requirements.\n  - Added unique random flank generation per peptide.\n  - Added pre-run duplicate checks for peptide/flank fields.\n  - Added post-run presentation score checks:\n    - >=1% rows with score >0.2\n    - at least one row with score >0.9\n  - Ran and validated in `/tmp/mhcflurry-cross-allele-1000-randflanks`.\n\n- [x] Build a high-score TF fixture for presentation regression tests.\n  - Added `scripts/extract_high_presentation_fixture.py`.\n  - Extracted contexts with any presentation score > 0.9 and retained all\n  alleles per context (including low-score alleles).\n  - Added fixture files under `test/data/` and new test\n  `test/test_released_presentation_highscore_rows.py`.\n\n- [x] Triage and fix important warnings.\n  - Fixed future pandas warning in `Class1PresentationPredictor` (`idxmin` on all-NA rows).\n  - Fixed future pandas warning in `random_negative_peptides` (assigning NaN into int dtype).\n  - Fixed test `SettingWithCopyWarning` in processing NN tests.\n  - Removed deprecated `pipes` and `pkg_resources` usage from `downloads.py`.\n"
  },
  {
    "path": "code-of-conduct.md",
    "content": "# Contributor Covenant Code of Conduct\n\n## Our Pledge\n\nIn the interest of fostering an open and welcoming environment, we as\ncontributors and maintainers pledge to making participation in our project and\nour community a harassment-free experience for everyone, regardless of age, body\nsize, disability, ethnicity, sex characteristics, gender identity and expression,\nlevel of experience, education, socio-economic status, nationality, personal\nappearance, race, religion, or sexual identity and orientation.\n\n## Our Standards\n\nExamples of behavior that contributes to creating a positive environment\ninclude:\n\n* Using welcoming and inclusive language\n* Being respectful of differing viewpoints and experiences\n* Gracefully accepting constructive criticism\n* Focusing on what is best for the community\n* Showing empathy towards other community members\n\nExamples of unacceptable behavior by participants include:\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n  advances\n* Trolling, insulting/derogatory comments, and personal or political attacks\n* Public or private harassment\n* Publishing others' private information, such as a physical or electronic\n  address, without explicit permission\n* Other conduct which could reasonably be considered inappropriate in a\n  professional setting\n\n## Our Responsibilities\n\nProject maintainers are responsible for clarifying the standards of acceptable\nbehavior and are expected to take appropriate and fair corrective action in\nresponse to any instances of unacceptable behavior.\n\nProject maintainers have the right and responsibility to remove, edit, or\nreject comments, commits, code, wiki edits, issues, and other contributions\nthat are not aligned to this Code of Conduct, or to ban temporarily or\npermanently any contributor for other behaviors that they deem inappropriate,\nthreatening, offensive, or harmful.\n\n## Scope\n\nThis Code of Conduct applies within all project spaces, and it also applies when\nan individual is representing the project or its community in public spaces.\nExamples of representing a project or community include using an official\nproject e-mail address, posting via an official social media account, or acting\nas an appointed representative at an online or offline event. Representation of\na project may be further defined and clarified by project maintainers.\n\n## Enforcement\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\nreported by contacting the project team at hello@openvax.org. All\ncomplaints will be reviewed and investigated and will result in a response that\nis deemed necessary and appropriate to the circumstances. The project team is\nobligated to maintain confidentiality with regard to the reporter of an incident.\nFurther details of specific enforcement policies may be posted separately.\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\nfaith may face temporary or permanent repercussions as determined by other\nmembers of the project's leadership.\n\n## Attribution\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n[homepage]: https://www.contributor-covenant.org\n\nFor answers to common questions about this code of conduct, see\nhttps://www.contributor-covenant.org/faq\n"
  },
  {
    "path": "compatibility_check/figures/summary.csv",
    "content": "column,mean_abs_diff,max_abs_diff,median_pct_diff,p99_pct_diff\naffinity_prediction,0.000855926090815266,0.005215815061092144,6.4490902031258496e-06,3.179401059073912e-05\naffinity_prediction_high,0.0008876603356824831,0.007624668971402571,1.3224372910328715e-14,5.7364675654514835e-05\naffinity_prediction_low,0.001672594107855598,0.011930973687412916,9.06903376028833e-06,6.726403554922356e-05\naffinity_prediction_percentile,0.0,0.0,0.0,0.0\npres_with_affinity,0.0012569356975690076,0.006232064377400093,9.673636122167752e-06,7.026281525141103e-05\npres_with_affinity_percentile,0.0,0.0,0.0,0.0\npres_with_presentation_percentile,6.398100744094776e-17,1.7763568394002505e-15,0.0,1.1063297085880059e-13\npres_with_presentation_score,1.8840357408035613e-08,1.6878932457276008e-07,9.296827030315175e-06,5.1157195465321214e-05\npres_with_processing_score,2.2056083823879512e-08,6.332993507385254e-08,3.609134553534807e-06,8.415549260993538e-06\npres_without_affinity,0.0012569356975690076,0.006232064377400093,9.673636122167752e-06,7.026281525141103e-05\npres_without_affinity_percentile,0.0,0.0,0.0,0.0\npres_without_presentation_percentile,1.2749942195496836e-16,1.7763568394002505e-15,0.0,1.7011266438791655e-13\npres_without_presentation_score,1.668495578066763e-08,1.7208790847877964e-07,6.2876170033895e-06,4.9695389639194476e-05\npres_without_processing_score,2.8383164760302833e-10,1.4901161138336505e-08,0.0,1.0314542159027186e-06\nprocessing_with_score,2.2056083823879512e-08,6.332993507385254e-08,3.609134553534807e-06,8.415549260993538e-06\nprocessing_without_score,2.8383164760302833e-10,1.4901161138336505e-08,0.0,1.0314542159027186e-06\n"
  },
  {
    "path": "develop.sh",
    "content": "#!/bin/bash\n# Development environment setup script\n# Source this script to activate the venv: source develop.sh\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\nVENV_DIR=\"$SCRIPT_DIR/.venv\"\n\n# Check if already in the venv\nif [[ \"$VIRTUAL_ENV\" == \"$VENV_DIR\" ]]; then\n    echo \"Virtual environment already active.\"\nelse\n    if [[ -d \"$VENV_DIR\" ]]; then\n        source \"$VENV_DIR/bin/activate\"\n        echo \"Activated virtual environment: $VENV_DIR\"\n    else\n        echo \"Virtual environment not found. Creating and installing...\"\n        python -m venv \"$VENV_DIR\"\n        source \"$VENV_DIR/bin/activate\"\n        pip install -e .\n        echo \"Activated virtual environment: $VENV_DIR\"\n    fi\nfi\n"
  },
  {
    "path": "docs/Makefile",
    "content": "# Makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS    =\nSPHINXBUILD   = sphinx-build\nPAPER         =\nBUILDDIR      = _build\n\n# Internal variables.\nPAPEROPT_a4     = -D latex_paper_size=a4\nPAPEROPT_letter = -D latex_paper_size=letter\nALLSPHINXOPTS   = -v -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .\n# the i18n builder cannot share the environment and doctrees with the others\nI18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .\n\n.PHONY: help\nhelp:\n\t@echo \"Please use \\`make <target>' where <target> is one of\"\n\t@echo \"  html       to make standalone HTML files\"\n\t@echo \"  dirhtml    to make HTML files named index.html in directories\"\n\t@echo \"  singlehtml to make a single large HTML file\"\n\t@echo \"  pickle     to make pickle files\"\n\t@echo \"  json       to make JSON files\"\n\t@echo \"  htmlhelp   to make HTML files and a HTML help project\"\n\t@echo \"  qthelp     to make HTML files and a qthelp project\"\n\t@echo \"  applehelp  to make an Apple Help Book\"\n\t@echo \"  devhelp    to make HTML files and a Devhelp project\"\n\t@echo \"  epub       to make an epub\"\n\t@echo \"  epub3      to make an epub3\"\n\t@echo \"  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter\"\n\t@echo \"  latexpdf   to make LaTeX files and run them through pdflatex\"\n\t@echo \"  latexpdfja to make LaTeX files and run them through platex/dvipdfmx\"\n\t@echo \"  text       to make text files\"\n\t@echo \"  man        to make manual pages\"\n\t@echo \"  texinfo    to make Texinfo files\"\n\t@echo \"  info       to make Texinfo files and run them through makeinfo\"\n\t@echo \"  gettext    to make PO message catalogs\"\n\t@echo \"  changes    to make an overview of all changed/added/deprecated items\"\n\t@echo \"  xml        to make Docutils-native XML files\"\n\t@echo \"  pseudoxml  to make pseudoxml-XML files for display purposes\"\n\t@echo \"  linkcheck  to check all external links for integrity\"\n\t@echo \"  doctest    to run all doctests embedded in the documentation (if enabled)\"\n\t@echo \"  coverage   to run coverage check of the documentation (if enabled)\"\n\t@echo \"  dummy      to check syntax errors of document sources\"\n\n# Added by Tim\n.PHONY: generate\ngenerate:\n\tsphinx-apidoc -M -f -o _build/ ../mhcflurry\n\n.PHONY: clean\nclean:\n\t# Added by tim: preserve html/.git\n\trm -rf $(BUILDDIR)/html/*\n\tmv $(BUILDDIR)/html /tmp/html-bk\n\trm -rf $(BUILDDIR)/*\n\tmv /tmp/html-bk $(BUILDDIR)/html\n\n.PHONY: html\nhtml:\n\t$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/html.\"\n\n.PHONY: dirhtml\ndirhtml:\n\t$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/dirhtml.\"\n\n.PHONY: singlehtml\nsinglehtml:\n\t$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml\n\t@echo\n\t@echo \"Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml.\"\n\n.PHONY: pickle\npickle:\n\t$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle\n\t@echo\n\t@echo \"Build finished; now you can process the pickle files.\"\n\n.PHONY: json\njson:\n\t$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json\n\t@echo\n\t@echo \"Build finished; now you can process the JSON files.\"\n\n.PHONY: htmlhelp\nhtmlhelp:\n\t$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp\n\t@echo\n\t@echo \"Build finished; now you can run HTML Help Workshop with the\" \\\n\t      \".hhp project file in $(BUILDDIR)/htmlhelp.\"\n\n.PHONY: qthelp\nqthelp:\n\t$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp\n\t@echo\n\t@echo \"Build finished; now you can run \"qcollectiongenerator\" with the\" \\\n\t      \".qhcp project file in $(BUILDDIR)/qthelp, like this:\"\n\t@echo \"# qcollectiongenerator $(BUILDDIR)/qthelp/MHCflurry.qhcp\"\n\t@echo \"To view the help file:\"\n\t@echo \"# assistant -collectionFile $(BUILDDIR)/qthelp/MHCflurry.qhc\"\n\n.PHONY: applehelp\napplehelp:\n\t$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp\n\t@echo\n\t@echo \"Build finished. The help book is in $(BUILDDIR)/applehelp.\"\n\t@echo \"N.B. You won't be able to view it unless you put it in\" \\\n\t      \"~/Library/Documentation/Help or install it in your application\" \\\n\t      \"bundle.\"\n\n.PHONY: devhelp\ndevhelp:\n\t$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp\n\t@echo\n\t@echo \"Build finished.\"\n\t@echo \"To view the help file:\"\n\t@echo \"# mkdir -p $$HOME/.local/share/devhelp/MHCflurry\"\n\t@echo \"# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MHCflurry\"\n\t@echo \"# devhelp\"\n\n.PHONY: epub\nepub:\n\t$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub\n\t@echo\n\t@echo \"Build finished. The epub file is in $(BUILDDIR)/epub.\"\n\n.PHONY: epub3\nepub3:\n\t$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3\n\t@echo\n\t@echo \"Build finished. The epub3 file is in $(BUILDDIR)/epub3.\"\n\n.PHONY: latex\nlatex:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo\n\t@echo \"Build finished; the LaTeX files are in $(BUILDDIR)/latex.\"\n\t@echo \"Run \\`make' in that directory to run these through (pdf)latex\" \\\n\t      \"(use \\`make latexpdf' here to do that automatically).\"\n\n.PHONY: latexpdf\nlatexpdf:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through pdflatex...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\n.PHONY: latexpdfja\nlatexpdfja:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through platex and dvipdfmx...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\n.PHONY: text\ntext:\n\t$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text\n\t@echo\n\t@echo \"Build finished. The text files are in $(BUILDDIR)/text.\"\n\n.PHONY: man\nman:\n\t$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man\n\t@echo\n\t@echo \"Build finished. The manual pages are in $(BUILDDIR)/man.\"\n\n.PHONY: texinfo\ntexinfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo\n\t@echo \"Build finished. 
The Texinfo files are in $(BUILDDIR)/texinfo.\"\n\t@echo \"Run \\`make' in that directory to run these through makeinfo\" \\\n\t      \"(use \\`make info' here to do that automatically).\"\n\n.PHONY: info\ninfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo \"Running Texinfo files through makeinfo...\"\n\tmake -C $(BUILDDIR)/texinfo info\n\t@echo \"makeinfo finished; the Info files are in $(BUILDDIR)/texinfo.\"\n\n.PHONY: gettext\ngettext:\n\t$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale\n\t@echo\n\t@echo \"Build finished. The message catalogs are in $(BUILDDIR)/locale.\"\n\n.PHONY: changes\nchanges:\n\t$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes\n\t@echo\n\t@echo \"The overview file is in $(BUILDDIR)/changes.\"\n\n.PHONY: linkcheck\nlinkcheck:\n\t$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck\n\t@echo\n\t@echo \"Link check complete; look for any errors in the above output \" \\\n\t      \"or in $(BUILDDIR)/linkcheck/output.txt.\"\n\n.PHONY: doctest\ndoctest:\n\t$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest\n\t@echo \"Testing of doctests in the sources finished, look at the \" \\\n\t      \"results in $(BUILDDIR)/doctest/output.txt.\"\n\n.PHONY: coverage\ncoverage:\n\t$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage\n\t@echo \"Testing of coverage in the sources finished, look at the \" \\\n\t      \"results in $(BUILDDIR)/coverage/python.txt.\"\n\n.PHONY: xml\nxml:\n\t$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml\n\t@echo\n\t@echo \"Build finished. The XML files are in $(BUILDDIR)/xml.\"\n\n.PHONY: pseudoxml\npseudoxml:\n\t$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml\n\t@echo\n\t@echo \"Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml.\"\n\n.PHONY: dummy\ndummy:\n\t$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy\n\t@echo\n\t@echo \"Build finished. Dummy builder generates no files.\"\n"
  },
  {
    "path": "docs/README.md",
    "content": "# MHCflurry documentation\n\nTo generate Sphinx documentation, from this directory run:\n\n```\n$ pip install -r requirements.txt  # for the first time you generate docs\n$ make generate html\n```\n\nDocumentation is written to the _build/ directory. These files should not be\nchecked into the repo.\n\nTo test example code:\n```\n$ make doctest \n```\n\nThen take a look at _build/doctest for detailed output.\n\n"
  },
  {
    "path": "docs/api.rst",
    "content": ".. _api-documentation:\n\nAPI Documentation\n=================\n\n.. include:: _build/mhcflurry.rst\n    :start-line: 2"
  },
  {
    "path": "docs/commandline_tools.rst",
    "content": "Command-line reference\n============================\n\nSee also the :ref:`tutorial <commandline_tutorial>`.\n\n.. _mhcflurry-predict:\n\n.. autoprogram:: mhcflurry.predict_command:parser\n    :prog: mhcflurry-predict\n\n.. _mhcflurry-predict-scan:\n\n.. autoprogram:: mhcflurry.predict_scan_command:parser\n    :prog: mhcflurry-predict-scan\n\n.. _mhcflurry-downloads:\n\n.. autoprogram:: mhcflurry.downloads_command:parser\n    :prog: mhcflurry-downloads\n\n.. _mhcflurry-class1-train-allele-specific-models:\n\n.. autoprogram:: mhcflurry.train_allele_specific_models_command:parser\n    :prog: mhcflurry-class1-train-allele-specific-models\n\n.. _mhcflurry-class1-select-allele-specific-models:\n\n.. autoprogram:: mhcflurry.select_allele_specific_models_command:parser\n    :prog: mhcflurry-class1-select-allele-specific-models\n\n.. _mhcflurry-class1-train-pan-allele-models:\n\n.. autoprogram:: mhcflurry.train_pan_allele_models_command:parser\n    :prog: mhcflurry-class1-train-pan-allele-models\n\n.. _mhcflurry-class1-select-pan-allele-models:\n\n.. autoprogram:: mhcflurry.select_pan_allele_models_command:parser\n    :prog: mhcflurry-class1-select-pan-allele-models\n\n.. _mhcflurry-class1-train-processing-models:\n\n.. autoprogram:: mhcflurry.train_processing_models_command:parser\n    :prog: mhcflurry-class1-train-processing-models\n\n.. _mhcflurry-class1-select-processing-models:\n\n.. autoprogram:: mhcflurry.select_processing_models_command:parser\n    :prog: mhcflurry-class1-select-processing-models\n\n.. _mhcflurry-class1-train-presentation-models:\n\n.. autoprogram:: mhcflurry.train_presentation_models_command:parser\n    :prog: mhcflurry-class1-train-presentation-models\n\n"
  },
  {
    "path": "docs/commandline_tutorial.rst",
    "content": ".. _commandline_tutorial:\n\nCommand-line tutorial\n=====================\n\n.. _downloading:\n\nDownloading models\n------------------\n\nMost users will use pre-trained MHCflurry models that we release. These models\nare distributed separately from the pip package and may be downloaded with the\n:ref:`mhcflurry-downloads` tool:\n\n.. code-block:: shell\n\n    $ mhcflurry-downloads fetch models_class1_presentation\n\nFiles downloaded with :ref:`mhcflurry-downloads` are stored in a platform-specific\ndirectory. To get the path to downloaded data, you can use:\n\n.. command-output:: mhcflurry-downloads path models_class1_presentation\n    :nostderr:\n\nWe also release a number of other \"downloads,\" such as curated training data and some\nexperimental models. To see what's available and what you have downloaded, run\n``mhcflurry-downloads info``.\n\nMost users will only need ``models_class1_presentation``, however, as the\npresentation predictor includes a peptide / MHC I binding affinity (BA) predictor\nas well as an antigen processing (AP) predictor.\n\n.. note::\n\n    The code we use for *generating* the downloads is in the\n    ``downloads-generation`` directory in the repository\n    (https://github.com/openvax/mhcflurry/tree/master/downloads-generation).\n\n\nGenerating predictions\n----------------------\n\nThe :ref:`mhcflurry-predict` command generates predictions for individual peptides\n(see the next section for how to scan protein sequences for epitopes). By\ndefault it will use the pre-trained models you downloaded above. Other\nmodels can be used by specifying the ``--models`` argument.\n\nRunning:\n\n.. command-output::\n    mhcflurry-predict\n        --alleles HLA-A0201 HLA-A0301\n        --peptides SIINFEKL SIINFEKD SIINFEKQ\n        --out /tmp/predictions.csv\n    :nostderr:\n\nresults in a file like this:\n\n.. command-output::\n    cat /tmp/predictions.csv\n\nThe binding affinity predictions are given as affinities (KD) in nM in the\n``mhcflurry_affinity`` column. Lower values indicate stronger binders. A commonly-used\nthreshold for peptides with a reasonable chance of being immunogenic is 500 nM.\n\nThe ``mhcflurry_affinity_percentile`` gives the percentile of the affinity\nprediction among a large number of random peptides tested on that allele (range\n0 - 100). Lower is stronger. Two percent is a commonly-used threshold.\n\nThe last two columns give the antigen processing and presentation scores,\nrespectively. These range from 0 to 1 with higher values indicating more\nfavorable processing or presentation.\n\n.. note::\n\n    The processing predictor is experimental. It models allele-independent\n    effects that influence whether a\n    peptide will be detected in a mass spec experiment. The presentation score is\n    a simple logistic regression model that combines the (log) binding affinity\n    prediction with the processing score to give a composite prediction. The resulting\n    prediction may be useful for prioritizing potential epitopes, but no\n    thresholds have been established for what constitutes a \"high enough\"\n    presentation score.\n\nIn most cases you'll want to specify the input as a CSV file instead of passing\npeptides and alleles as command-line arguments. If you're relying on the\nprocessing or presentation scores, you may also want to pass the upstream and\ndownstream sequences of the peptides from their source proteins for potentially more\naccurate cleavage prediction.
See the :ref:`mhcflurry-predict` docs for the full set of supported columns and\noptions.\n\n\nUsing the older, allele-specific models\n-------------------------------------------\n\nPrevious versions of MHCflurry (described in the 2018 paper) used models\ntrained on affinity measurements, one allele per model (i.e. allele-specific).\nMass spec datasets were incorporated in the model selection step.\n\nThese models are still available to use with the latest version of MHCflurry.\nTo download these predictors, run:\n\n.. code-block:: shell\n\n    $ mhcflurry-downloads fetch models_class1\n\nand specify ``--models`` when you call ``mhcflurry-predict``:\n\n.. code-block:: shell\n\n    $ mhcflurry-predict \\\n        --alleles HLA-A0201 HLA-A0301 \\\n        --peptides SIINFEKL SIINFEKD SIINFEKQ \\\n        --models \"$(mhcflurry-downloads path models_class1)/models\" \\\n        --out /tmp/predictions.csv\n\n\nScanning protein sequences for predicted MHC I ligands\n------------------------------------------------------\n\nStarting in version 1.6.0, MHCflurry supports scanning proteins for MHC-binding\npeptides using the ``mhcflurry-predict-scan`` command.\n\nWe'll generate predictions across ``example.fasta``, a FASTA file with two short\nsequences:\n\n.. literalinclude:: /example.fasta\n\nHere's the ``mhcflurry-predict-scan`` invocation to scan the proteins for\nbinders to either of two MHC I genotypes (using a 100 nM threshold):\n\n.. command-output::\n    mhcflurry-predict-scan\n        example.fasta\n        --alleles\n            HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:02,HLA-C*07:02\n            HLA-A*01:01,HLA-A*02:06,HLA-B*44:02,HLA-B*07:02,HLA-C*01:02,HLA-C*03:01\n        --threshold-affinity 100\n    :nostderr:\n\nSee the :ref:`mhcflurry-predict-scan` docs for more options.\n\n\nFitting your own models\n-----------------------\n\nIf you have your own data and want to fit your own MHCflurry models, you have\na few options. If you have data for only one or a few MHC I alleles, the best\napproach is to use the\n:ref:`mhcflurry-class1-train-allele-specific-models` command to fit an\n\"allele-specific\" predictor, in which separate neural networks are used for\neach allele.\n\nTo call :ref:`mhcflurry-class1-train-allele-specific-models` you'll need some\ntraining data. The data we use for our released predictors can be downloaded with\n:ref:`mhcflurry-downloads`:\n\n.. code-block:: shell\n\n    $ mhcflurry-downloads fetch data_curated\n\nIt looks like this:\n\n.. command-output::\n    bzcat \"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\" | head -n 3\n    :shell:\n    :nostderr:\n\nHere's an example invocation to fit a predictor:\n\n.. code-block:: shell\n\n    $ mhcflurry-class1-train-allele-specific-models \\\n        --data curated_training_data.csv.bz2 \\\n        --hyperparameters hyperparameters.yaml \\\n        --min-measurements-per-allele 75 \\\n        --out-models-dir models\n\nThe ``hyperparameters.yaml`` file gives the list of neural network architectures\nto train models for. Here's an example specifying a single architecture:\n\n.. code-block:: yaml\n\n    - activation: tanh\n      dense_layer_l1_regularization: 0.0\n      dropout_probability: 0.0\n      early_stopping: true\n      layer_sizes: [8]\n      locally_connected_layers: []\n      loss: custom:mse_with_inequalities\n      max_epochs: 500\n      minibatch_size: 128\n      n_models: 4\n      output_activation: sigmoid\n      patience: 20\n      peptide_amino_acid_encoding: BLOSUM62\n      random_negative_affinity_max: 50000.0\n      random_negative_affinity_min: 20000.0\n      random_negative_constant: 25\n      random_negative_rate: 0.0\n      validation_split: 0.1\n\nThe available hyperparameters for binding predictors are defined in\n`~mhcflurry.Class1NeuralNetwork`. To see exactly how\nthese are used you will need to read the source code.\n
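\nOne way to list the recognized hyperparameter names with their default values\nfrom Python is to inspect the class-level defaults (note that\n``hyperparameter_defaults`` is an implementation detail of the current code and\nmay change):\n\n.. code-block:: python\n\n    # Print each supported hyperparameter and its default value.\n    from mhcflurry import Class1NeuralNetwork\n\n    defaults = Class1NeuralNetwork.hyperparameter_defaults.defaults\n    for name, value in sorted(defaults.items()):\n        print(name, \"=\", value)\n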
\n.. note::\n\n    MHCflurry predictors are serialized to disk as many files in a directory. The\n    model training command above will write the models to the output directory specified by the\n    ``--out-models-dir`` argument. This directory has files like:\n\n    .. program-output::\n        ls \"$(mhcflurry-downloads path models_class1)/models\"\n        :shell:\n        :nostderr:\n        :ellipsis: 4,-4\n\n    The ``manifest.csv`` file gives metadata for all the models used in the predictor.\n    There will be a ``weights_...`` file for each model giving its weights\n    (the parameters for the neural network). The ``percent_ranks.csv`` stores a\n    histogram of model predictions for each allele over a large number of random\n    peptides. It is used for generating the percent ranks at prediction time.\n\nTo fit pan-allele models like the ones released with MHCflurry, you can use\na similar tool, :ref:`mhcflurry-class1-train-pan-allele-models`. You'll probably\nalso want to take a look at the scripts used to generate the production models,\nwhich are available in the *downloads-generation* directory in the MHCflurry\nrepository. See the scripts in the *models_class1_pan* subdirectory for how the\nfitting and model selection were done for the models currently distributed with\nMHCflurry.\n\n.. note::\n\n    The production MHCflurry models were fit using a cluster with several\n    dozen GPUs over a period of about two days. If you perform model selection\n    over fewer architectures, however, it should be possible to fit a predictor\n    using fewer resources.\n\n\nEnvironment variables\n-------------------------------------------------\n\nMHCflurry behavior can be modified using these environment variables:\n\n``MHCFLURRY_DEFAULT_CLASS1_MODELS``\n    Path to models directory. If you call ``Class1AffinityPredictor.load()``\n    with no arguments, the models specified in this environment variable will be\n    used. If this environment variable is undefined, the downloaded models for\n    the current MHCflurry release are used.\n\n``MHCFLURRY_OPTIMIZATION_LEVEL``\n    The pan-allele models can be somewhat slow. As an optimization, when this\n    variable is greater than 0 (default is 1), we merge the pan-allele models in\n    the ensemble into a single combined network. In our experiments\n    it gives about a 30% speed improvement. It has no effect on allele-specific\n    models. Set this variable to 0 to disable this behavior. This may be helpful\n    if you are running out of memory using the pan-allele models.\n\n``MHCFLURRY_DEFAULT_PREDICT_BATCH_SIZE``\n    For large prediction tasks, it can be helpful to increase the prediction batch\n    size, which is set by this environment variable (default is 4096). This\n    affects both allele-specific and pan-allele predictors. It can have large\n    effects on performance. Alternatively, if you are running out of memory,\n    you can try decreasing the batch size.\n
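\nFor example (``input.csv`` is a hypothetical input file here, and the values\nshown are arbitrary):\n\n.. code-block:: shell\n\n    # Use a larger prediction batch size for a big job:\n    $ MHCFLURRY_DEFAULT_PREDICT_BATCH_SIZE=65536 mhcflurry-predict input.csv --out predictions.csv\n\n    # Disable merging of the pan-allele models to reduce memory use:\n    $ MHCFLURRY_OPTIMIZATION_LEVEL=0 mhcflurry-predict input.csv --out predictions.csv\n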
\n\n"
  },
  {
    "path": "docs/conf.py",
    "content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n# MHCflurry documentation build configuration file, created by\n# sphinx-quickstart on Sun Dec 10 20:25:16 2017.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys\nimport os\nimport re\nimport textwrap\nimport logging\nimport subprocess\n\nif os.environ.get(\"READTHEDOCS\"):\n    # For rtd builds, call \"make generate\" first.\n    subprocess.check_call(\"make generate\", shell=True)\n\n# Hack added by tim for bug in autoprogram extension under Python 2.\nfrom sphinx.util.pycompat import indent  # pylint: disable=import-error\ntextwrap.indent = indent\n\n# Disable logging (added by tim)\nlogging.disable(logging.ERROR)\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\nsys.path.insert(0, os.path.abspath('.'))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n    'sphinx.ext.autodoc',\n    'sphinx.ext.doctest',\n    'sphinx.ext.coverage',\n    'sphinx.ext.ifconfig',\n    'sphinx.ext.viewcode',\n    'sphinx.ext.githubpages',\n    'numpydoc',\n    'sphinxcontrib.programoutput',\n    'sphinxcontrib.autoprogram',\n    'sphinx.ext.githubpages',\n]\n\ndoctest_global_setup = '''\nimport logging\nlogging.getLogger('matplotlib').disabled = True\nimport numpy\nimport pandas\nimport mhcflurry\npandas.set_option('max_columns', 20)\npandas.set_option('display.expand_frame_repr', False)\n'''\n\ndoctest_test_doctest_blocks = ''\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'MHCflurry'\ncopyright = 'Timothy O\\'Donnell'\nauthor = 'Timothy O\\'Donnell'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n\n# The short X.Y version.\n# Added by Tim: reading version from mhcflurry __init__.py as in setup.py\nwith open('../mhcflurry/version.py', 'r') as f:\n    version = re.search(\n        r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]',\n        f.read(),\n        re.MULTILINE).group(1)\n\n# The full version, including alpha/beta/rc tags.\nrelease = version\n\n# Added by tim\nautodoc_member_order = 'bysource'\nautoclass_content = 'both'\n\n# Added by tim\nsuppress_warnings = ['image.nonlocal_uri']\n\n# The language for content autogenerated by Sphinx. 
Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\nexclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\ndefault_role = 'py:obj'\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n#keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n# Added by Tim\n# http://stackoverflow.com/questions/12206334/sphinx-autosummary-toctree-contains-reference-to-nonexisting-document-warnings\nnumpydoc_show_class_members = False\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages.  See the documentation for\n# a list of builtin themes.\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  For a list of options available for each theme, see the\n# documentation.\n#html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents.\n# \"<project> v<release> documentation\" by default.\n#html_title = 'MHCflurry v1.0.0'\n\n# A shorter title for the navigation bar.  Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n#html_logo = None\n\n# The name of an image file (relative to this directory) to use as a favicon of\n# the docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\n# html_static_path = ['_static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. 
These files are copied\n# directly to the root of the documentation.\n#html_extra_path = []\n\n# If not None, a 'Last updated on:' timestamp is inserted at every page\n# bottom, using the given strftime format.\n# The empty string is equivalent to '%b %d, %Y'.\nhtml_last_updated_fmt = \"\"\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\nhtml_domain_indices = False\n\n# If false, no index is generated.\nhtml_use_index = False\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it.  The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n#html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n#   'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'\n#   'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'\n#html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# 'ja' uses this config value.\n# 'zh' user can custom change `jieba` dictionary path.\n#html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'MHCflurrydoc'\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n\n# Latex figure (float) alignment\n#'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n    (master_doc, 'MHCflurry.tex', 'MHCflurry Documentation',\n     'Timothy O\\'Donnell', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#latex_use_parts = False\n\n# If true, show page references after internal links.\n#latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#latex_appendices = []\n\n# If false, no module index is generated.\n#latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n    (master_doc, 'mhcflurry', 'MHCflurry Documentation',\n     [author], 1)\n]\n\n# If true, show URL addresses after external links.\n#man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n    (master_doc, 'MHCflurry', 'MHCflurry Documentation',\n     author, 'MHCflurry', 'One line description of project.',\n     'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n#texinfo_appendices = []\n\n# If false, no module index is generated.\n#texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#texinfo_no_detailmenu = False\n"
  },
  {
    "path": "docs/doctest.sh",
    "content": "#!/bin/bash\n\nsphinx-build -b doctest -d _build/doctrees . _build/doctest\nRETVAL=$?\necho doctest returned $RETVAL\ncat _build/doctest/output.txt\nexit $RETVAL\n"
  },
  {
    "path": "docs/example.fasta",
    "content": ">protein1\nMSSSSTPVCPNGPGNCQV\n>protein2\nMVENKRLLEGMEMIFGQVIPGA\n"
  },
  {
    "path": "docs/index.rst",
    "content": "MHCflurry documentation\n=====================================\n\n.. toctree::\n   :maxdepth: 3\n\n   intro\n   commandline_tutorial\n   python_tutorial\n   model-info/allele_motifs\n   commandline_tools\n   api\n\n"
  },
  {
    "path": "docs/intro.rst",
    "content": "Introduction and setup\n=======================\n\nMHCflurry is an open source package for peptide/MHC I binding affinity prediction. It\naims to provide competitive accuracy with a fast and documented implementation.\n\nYou can download pre-trained MHCflurry models fit to mass spec-identified MHC I\nligands and peptide/MHC affinity measurements deposited in IEDB (plus a few other\nsources) or train an MHCflurry predictor on your own data.\n\nStarting in version 1.6.0, the default MHCflurry binding affinity predictors\nare \"pan-allele\" models that support most sequenced MHC I alleles across humans\nand a few other species (about 14,000 alleles in total). This version also\nintroduces two experimental predictors, an \"antigen processing\" predictor\nthat attempts to model MHC allele-independent effects such as proteasomal\ncleavage and a \"presentation\" predictor that integrates processing predictions\nwith binding affinity predictions to give a composite \"presentation score.\" Both\nmodels are trained on mass spec-identified MHC ligands.\n\nMHCflurry supports Python 3.10+. It uses the `PyTorch <https://pytorch.org/>`__\nneural network library. GPUs and Apple Silicon (MPS) may optionally be used for\na speed improvement and are auto-detected.\n\nIf you find MHCflurry useful in your research, please cite:\n\n    T. J. O'Donnell, et al. \"MHCflurry 2.0: Improved pan-allele prediction of MHC\n    I-presented peptides by incorporating antigen processing,\"\n    *Cell Systems*, 2020. https://doi.org/10.1016/j.cels.2020.06.010\n\n    T. J. O'Donnell, et al., \"MHCflurry: Open-Source Class I MHC Binding Affinity\n    Prediction,\" *Cell Systems*, 2018. https://doi.org/10.1016/j.cels.2018.05.014\n\nIf you have questions or encounter problems, please file an issue at the\nMHCflurry GitHub repo: https://github.com/openvax/mhcflurry\n\n\nInstallation (pip)\n-------------------\n\nInstall the package:\n\n.. code-block:: shell\n\n    $ pip install mhcflurry\n\nThen download our datasets and trained models:\n\n.. code-block:: shell\n\n    $ mhcflurry-downloads fetch\n\nFrom a checkout you can run the unit tests with:\n\n.. code-block:: shell\n\n    $ pip install pytest\n    $ pytest\n\n\nUsing conda\n-------------\n\nYou can alternatively get up and running with a `conda <https://conda.io/docs/>`__\nenvironment as follows.\n\n.. code-block:: shell\n\n    $ conda create -q -n mhcflurry-env python=3.10\n    $ source activate mhcflurry-env\n\nThen continue as above:\n\n.. code-block:: shell\n\n    $ pip install mhcflurry\n    $ mhcflurry-downloads fetch\n"
  },
  {
    "path": "docs/python_tutorial.rst",
    "content": "Python library tutorial\n=======================\n\nThe MHCflurry Python API exposes additional options and features beyond those\nsupported by the commandline tools and can be more convenient for interactive\nanalyses and bioinformatic pipelines. This tutorial gives a basic overview\nof the most important functionality. See the :ref:`API-documentation` for further\ndetails.\n\nLoading a predictor\n----------------------------------\n\nMost prediction tasks can be performed using the\n`~mhcflurry.Class1PresentationPredictor` class, which provides a programmatic API\nto the functionality in the :ref:`mhcflurry-predict` and\n:ref:`mhcflurry-predict-scan` commands.\n\nInstances of `~mhcflurry.Class1PresentationPredictor` wrap a\n`~mhcflurry.Class1AffinityPredictor` to generate binding affinity predictions\nand a `~mhcflurry.Class1ProcessingPredictor` to generate antigen processing\npredictions. The presentation score is computed using a logistic regression\nmodel over binding affinity and processing predictions.\n\nUse the `~mhcflurry.Class1PresentationPredictor.load` static method to load a\ntrained predictor from disk. With no arguments this method will load the predictor\nreleased with MHCflurry (see :ref:`downloading`\\ ). If you pass a path to a\nmodels directory, then it will load that predictor instead.\n\n.. doctest::\n\n    >>> from mhcflurry import Class1PresentationPredictor\n    >>> predictor = Class1PresentationPredictor.load()\n    >>> predictor.supported_alleles[:5]\n    ['Atbe-B*01:01', 'Atbe-E*03:01', 'Atbe-G*03:01', 'Atbe-G*03:02', 'Atbe-G*06:01']\n\nPredicting for individual peptides\n----------------------------------\n\nTo generate predictions for individual peptides, we can use the\n`~mhcflurry.Class1AffinityPredictor.predict` method of the `~mhcflurry.Class1PresentationPredictor`,\nloaded above. This method returns a `pandas.DataFrame` with binding affinity, processing, and presentation\npredictions:\n\n.. doctest::\n\n    >>> predictor.predict(\n    ...     peptides=[\"SIINFEKL\", \"NLVPMVATV\"],\n    ...     alleles=[\"HLA-A0201\", \"HLA-A0301\"],\n    ...     verbose=0)\n         peptide  peptide_num sample_name      affinity best_allele  processing_score  presentation_score\n    0   SIINFEKL            0     sample1  12906.786173   HLA-A0201          0.101473            0.012503\n    1  NLVPMVATV            1     sample1     15.038358   HLA-A0201          0.676289            0.975463\n\nHere, the list of alleles is taken to be an individual's MHC I genotype (i.e. up\nto 6 alleles), and the strongest binder across alleles for each peptide is\nreported.\n\n.. note::\n\n    MHCflurry normalizes allele names using the `mhcgnomes <https://github.com/til-unc/mhcgnomes>`__\n    package. Names like ``HLA-A0201`` or ``A*02:01`` will be\n    normalized to ``HLA-A*02:01``, so most naming conventions can be used\n    with methods such as `~mhcflurry.Class1PresentationPredictor.predict`.\n\nIf you have multiple sample genotypes, you can pass a dict, where the\nkeys are arbitrary sample names:\n\n.. doctest::\n\n    >>> predictor.predict(\n    ...     peptides=[\"KSEYMTSWFY\", \"NLVPMVATV\"],\n    ...     alleles={\n    ...        \"sample1\": [\"A0201\", \"A0301\", \"B0702\", \"B4402\", \"C0201\", \"C0702\"],\n    ...        \"sample2\": [\"A0101\", \"A0206\", \"B5701\", \"C0202\"],\n    ...     },\n    ...     
verbose=0)\n          peptide  peptide_num sample_name      affinity best_allele  processing_score  presentation_score\n    0  KSEYMTSWFY            0     sample1  16737.745268       A0301          0.381632            0.026550\n    1   NLVPMVATV            1     sample1     15.038358       A0201          0.676289            0.975463\n    2  KSEYMTSWFY            0     sample2     62.540779       A0101          0.381632            0.796731\n    3   NLVPMVATV            1     sample2     15.765500       A0206          0.676289            0.974439\n\nHere the strongest binder for each sample / peptide pair is returned.\n\nMany users will focus on the binding affinity predictions, as the\nprocessing and presentation predictions are experimental. If you do use the latter\nscores, however, when available you should provide the upstream (N-flank)\nand downstream (C-flank) sequences from the source proteins of the peptides for\na small boost in accuracy. To do so, specify the ``n_flanks`` and ``c_flanks``\narguments, which give the flanking sequences for the corresponding peptides:\n\n.. doctest::\n\n    >>> predictor.predict(\n    ...     peptides=[\"KSEYMTSWFY\", \"NLVPMVATV\"],\n    ...     n_flanks=[\"NNNNNNN\", \"SSSSSSSS\"],\n    ...     c_flanks=[\"CCCCCCCC\", \"YYYAAAA\"],\n    ...     alleles={\n    ...        \"sample1\": [\"A0201\", \"A0301\", \"B0702\", \"B4402\", \"C0201\", \"C0702\"],\n    ...        \"sample2\": [\"A0101\", \"A0206\", \"B5701\", \"C0202\"],\n    ...     },\n    ...     verbose=0)\n          peptide   n_flank   c_flank  peptide_num sample_name      affinity best_allele  processing_score  presentation_score\n    0  KSEYMTSWFY   NNNNNNN  CCCCCCCC            0     sample1  16737.745268       A0301          0.605816            0.056190\n    1   NLVPMVATV  SSSSSSSS   YYYAAAA            1     sample1     15.038358       A0201          0.824994            0.986719\n    2  KSEYMTSWFY   NNNNNNN  CCCCCCCC            0     sample2     62.540779       A0101          0.605816            0.897493\n    3   NLVPMVATV  SSSSSSSS   YYYAAAA            1     sample2     15.765500       A0206          0.824994            0.986155\n\nScanning protein sequences\n--------------------------\n\nThe `~mhcflurry.Class1PresentationPredictor.predict_sequences` method supports\nscanning protein sequences for MHC ligands. Here's an example to identify all\npeptides with a predicted binding affinity of 500 nM or tighter to any allele\nacross two sample genotypes and two short protein sequences.\n\n.. doctest::\n\n    >>> predictor.predict_sequences(\n    ...    sequences={\n    ...        'protein1': \"MDSKGSSQKGSRLLLLLVVSNLL\",\n    ...        'protein2': \"SSLPTPEDKEQAQQTHH\",\n    ...    },\n    ...    alleles={\n    ...        \"sample1\": [\"A0201\", \"A0301\", \"B0702\"],\n    ...        \"sample2\": [\"A0101\", \"C0202\"],\n    ...    },\n    ...    result=\"filtered\",\n    ...    comparison_quantity=\"affinity\",\n    ...    filter_value=500,\n
    ...    verbose=0)\n      sequence_name  pos     peptide         n_flank     c_flank sample_name    affinity best_allele  affinity_percentile  processing_score  presentation_score\n    0      protein1   13   LLLLVVSNL   MDSKGSSQKGSRL           L     sample1   38.206225       A0201             0.380125          0.017644            0.571060\n    1      protein1   14   LLLVVSNLL  MDSKGSSQKGSRLL                 sample1   42.243472       A0201             0.420250          0.090984            0.619213\n    2      protein1    5   SSQKGSRLL           MDSKG   LLLVVSNLL     sample2   66.749223       C0202             0.803375          0.383608            0.774468\n    3      protein1    6   SQKGSRLLL          MDSKGS    LLVVSNLL     sample2  178.033467       C0202             1.820000          0.275019            0.482206\n    4      protein1   13  LLLLVVSNLL   MDSKGSSQKGSRL                 sample1  202.208167       A0201             1.112500          0.058782            0.261320\n    5      protein1   12  LLLLLVVSNL    MDSKGSSQKGSR           L     sample1  202.506582       A0201             1.112500          0.010025            0.225648\n    6      protein2    0   SSLPTPEDK                    EQAQQTHH     sample1  335.529377       A0301             1.011750          0.010443            0.156798\n    7      protein2    0   SSLPTPEDK                    EQAQQTHH     sample2  353.451759       C0202             2.674250          0.010443            0.150753\n    8      protein1    8   KGSRLLLLL        MDSKGSSQ      VVSNLL     sample2  410.327286       C0202             2.887000          0.121374            0.194081\n    9      protein1    5    SSQKGSRL           MDSKG  LLLLVVSNLL     sample2  477.285937       C0202             3.107375          0.111982            0.168572\n\nWhen using ``predict_sequences``, the flanking sequences for each peptide are\nautomatically included in the processing and presentation predictions.\n\nSee the documentation for `~mhcflurry.Class1PresentationPredictor` for other\nuseful methods.\n\n\nLower level interfaces\n----------------------------------\n\nThe `~mhcflurry.Class1PresentationPredictor` delegates to a\n`~mhcflurry.Class1AffinityPredictor` instance for binding affinity predictions.\nIf all you need are binding affinities, you can use this instance directly.\n\nHere's an example:\n\n.. doctest::\n\n    >>> from mhcflurry import Class1AffinityPredictor\n    >>> predictor = Class1AffinityPredictor.load()\n    >>> predictor.predict_to_dataframe(allele=\"HLA-A0201\", peptides=[\"SIINFEKL\", \"SIINFEQL\"])\n        peptide     allele    prediction  prediction_low  prediction_high  prediction_percentile\n    0  SIINFEKL  HLA-A0201  12906.786173     8829.460289     18029.923061               6.566375\n    1  SIINFEQL  HLA-A0201  13025.300796     9050.056312     18338.004869               6.623625\n\nThe ``prediction_low`` and ``prediction_high`` fields give the 5-95 percentile\npredictions across the models in the ensemble. This detailed information is not\navailable through the higher-level `~mhcflurry.Class1PresentationPredictor`\ninterface.\n\nUnder the hood, `Class1AffinityPredictor` itself delegates to an ensemble\nof `~mhcflurry.Class1NeuralNetwork` instances, which implement the neural network\nmodels used for prediction. To fit your own affinity prediction models, call\n`~mhcflurry.Class1NeuralNetwork.fit`.\n
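\nHere is a minimal sketch of fitting a single network directly (the\nhyperparameter values and toy measurements below are arbitrary; real training\nuses thousands of measurements):\n\n.. code-block:: python\n\n    from mhcflurry import Class1NeuralNetwork\n\n    # Hyperparameters are passed as keyword arguments to the constructor.\n    network = Class1NeuralNetwork(max_epochs=10, early_stopping=False)\n\n    # Fit to binding affinity measurements (nM) for a single allele.\n    network.fit(\n        peptides=[\"SIINFEKL\", \"SIINFEKD\", \"SIINFEKQ\", \"SIINFEQL\"],\n        affinities=[120.0, 25000.0, 12000.0, 3000.0],\n    )\n\n    print(network.predict([\"SIINFEKV\"]))\n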
\nYou can similarly use `~mhcflurry.Class1ProcessingPredictor` directly for\nantigen processing prediction, and there is a low-level\n`~mhcflurry.Class1ProcessingNeuralNetwork` with a `~mhcflurry.Class1ProcessingNeuralNetwork.fit` method.\n\nSee the API documentation of these classes for details."
  },
  {
    "path": "docs/requirements.txt",
    "content": "sphinx\nsphinxcontrib-programoutput\nsphinxcontrib-autoprogram\nsphinx-rtd-theme\nnumpydoc\npypandoc\npydot\ntabulate\nlogomaker\ntqdm\n"
  },
  {
    "path": "downloads-generation/README.md",
    "content": "# Downloads generation\n\nThis directory contains code and instructions needed to *generate* the datasets and trained models published with MHCflurry.\n\nIf you are only looking to download datasets and trained models, you do not need to use any of this. Just run `mhcflurry-downloads fetch` to download the standard models and datasets."
  },
  {
    "path": "downloads-generation/allele_sequences/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Create allele sequences (sometimes referred to as pseudosequences) by\n# performing a global alignment across all MHC amino acid sequences we can get\n# our hands on.\n#\n# Requires: clustalo, wget\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=allele_sequences\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\nexport PYTHONUNBUFFERED=1\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\nwhich clustalo\nclustalo --version\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\ncp $SCRIPT_DIR/make_allele_sequences.py .\ncp $SCRIPT_DIR/select_alleles_to_disambiguate.py .\ncp $SCRIPT_DIR/filter_sequences.py .\n\ncp $SCRIPT_DIR/class1_pseudosequences.csv .\n\ncp $SCRIPT_ABSOLUTE_PATH .\n\n# Generate sequences\n# Training data is used to decide which additional positions to include in the\n# allele sequences to differentiate alleles that have identical traditional\n# pseudosequences but have associated training data\nTRAINING_DATA=\"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\"\n\npython select_alleles_to_disambiguate.py \\\n    \"$TRAINING_DATA\" \\\n    --min-count 1000 \\\n    --out training_data.alleles.txt\n\n# Human\nwget -q ftp://ftp.ebi.ac.uk/pub/databases/ipd/imgt/hla/fasta/A_prot.fasta\nwget -q ftp://ftp.ebi.ac.uk/pub/databases/ipd/imgt/hla/fasta/B_prot.fasta\nwget -q ftp://ftp.ebi.ac.uk/pub/databases/ipd/imgt/hla/fasta/C_prot.fasta\nwget -q ftp://ftp.ebi.ac.uk/pub/databases/ipd/imgt/hla/fasta/E_prot.fasta\nwget -q ftp://ftp.ebi.ac.uk/pub/databases/ipd/imgt/hla/fasta/F_prot.fasta\nwget -q ftp://ftp.ebi.ac.uk/pub/databases/ipd/imgt/hla/fasta/G_prot.fasta\n\n# Mouse\nwget -q https://www.uniprot.org/uniprot/P01899.fasta  # H-2 Db\nwget -q https://www.uniprot.org/uniprot/P01900.fasta  # H-2 Dd\nwget -q https://www.uniprot.org/uniprot/P14427.fasta  # H-2 Dp\nwget -q https://www.uniprot.org/uniprot/P14426.fasta  # H-2 Dk\nwget -q https://www.uniprot.org/uniprot/Q31145.fasta  # H-2 Dq\n\nwget -q https://www.uniprot.org/uniprot/P01901.fasta  # H-2 Kb\nwget -q https://www.uniprot.org/uniprot/P01902.fasta  # H-2 Kd\nwget -q https://www.uniprot.org/uniprot/P04223.fasta  # H-2 Kk\nwget -q https://www.uniprot.org/uniprot/P14428.fasta  # H-2 Kq\n\nwget -q https://www.uniprot.org/uniprot/P01897.fasta  # H-2 Ld\nwget -q https://www.uniprot.org/uniprot/Q31151.fasta  # H-2 Lq\n\n# Various\nwget -q ftp://ftp.ebi.ac.uk/pub/databases/ipd/mhc/MHC_prot.fasta\n\npython filter_sequences.py *.fasta --out class1.fasta\n\ntime clustalo -i class1.fasta -o class1.aligned.fasta\n\ntime python make_allele_sequences.py \\\n    class1.aligned.fasta \\\n    --recapitulate-sequences class1_pseudosequences.csv \\\n    --differentiate-alleles training_data.alleles.txt \\\n    --out-csv allele_sequences.csv\n\ntime python make_allele_sequences.py \\\n    class1.aligned.fasta \\\n    --recapitulate-sequences class1_pseudosequences.csv \\\n    --out-csv allele_sequences.no_differentiation.csv\n\n# Cleanup\ngzip -f class1.fasta\ngzip -f class1.aligned.fasta\nrm *.fasta\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 
LOG.txt\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n"
  },
  {
    "path": "downloads-generation/allele_sequences/class1_pseudosequences.csv",
    "content": "allele,pseudosequence\nBoLA-100901,YYSMYREISENVYGSNLYLLYRDYTWEYLNYRWY\nBoLA-100902,YYSEYREISENVYESNLYLLYRDYTWEYLNYRWY\nBoLA-101901,YHTKYREISENVYGSNLYYDYDYYTWAVFNYRGY\nBoLA-102001,YHTKYREISENVYGSNLYFLYMDYTWAVFNYRGY\nBoLA-102101,YYTKYREISENVYGSNLYFQFRYYTWADFNYEGY\nBoLA-102301,YYSEYREISENVYESNLYIAYSDYTWEYLNYRWY\nBoLA-102801,YYTKYREISEKLYENTLYLQFRYYTWADFNYEWY\nBoLA-102901,YYTRYREISENLYKNTAYITFMYYTWANENYRGY\nBoLA-103101,YYTKYDEISENLYKNTLYIAFRDYTWAYLNYTWY\nBoLA-103102,YYTKYDEISENLYKDTLYIAFRDYTWAYLNYTWY\nBoLA-104201,YHTKYDEISENLYKDTLYIAYRDYTWEYLNYRGY\nBoLA-104901,YYAEYREISDTSFVGTLYIEYEYYTWAYLNYEGY\nBoLA-106101,YYTIYREISENVYESNLYFRYDFYTWADFNYRWY\nBoLA-106701,YYAMYEMDAEDRSLCTLYFQFTFYTWAAFNYTWY\nBoLA-107401,YYTKYREISENLYKNTAYLRFSFYTWAAENYRGY\nBoLA-1:00901,YYSMYREISENVYGSNLYLLYRDYTWEYLNYRWY\nBoLA-1:00902,YYSEYREISENVYESNLYLLYRDYTWEYLNYRWY\nBoLA-1:01901,YHTKYREISENVYGSNLYYDYDYYTWAVFNYRGY\nBoLA-1:02001,YHTKYREISENVYGSNLYFLYMDYTWAVFNYRGY\nBoLA-1:02101,YYTKYREISENVYGSNLYFQFRYYTWADFNYEGY\nBoLA-1:02301,YYSEYREISENVYESNLYIAYSDYTWEYLNYRWY\nBoLA-1:02801,YYTKYREISEKLYENTLYLQFRYYTWADFNYEWY\nBoLA-1:02901,YYTRYREISENLYKNTAYITFMYYTWANENYRGY\nBoLA-1:03101,YYTKYDEISENLYKNTLYIAFRDYTWAYLNYTWY\nBoLA-1:03102,YYTKYDEISENLYKDTLYIAFRDYTWAYLNYTWY\nBoLA-1:04201,YHTKYDEISENLYKDTLYIAYRDYTWEYLNYRGY\nBoLA-1:04901,YYAEYREISDTSFVGTLYIEYEYYTWAYLNYEGY\nBoLA-1:06101,YYTIYREISENVYESNLYFRYDFYTWADFNYRWY\nBoLA-1:06701,YYAMYEMDAEDRSLCTLYFQFTFYTWAAFNYTWY\nBoLA-1:07401,YYTKYREISENLYKNTAYLRFSFYTWAAENYRGY\nBoLA-200501,YYAEYRNIYDTIFVDTLYIAYWFYTWAAWNYEWY\nBoLA-200601,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWY\nBoLA-200602,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWH\nBoLA-200801,YLIMYRENSETTFANTAYVEYMDYTWADWNYRWY\nBoLA-200802,YLIMYRENSETTFANTAYVEYMDYTWADWNYRGY\nBoLA-201201,YYATYRENFDTTFVDTLYIAYRDYTWAEHNYTWY\nBoLA-201601,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEGY\nBoLA-201602,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWY\nBoLA-201801,YYADYRNIYDTIFANTAYFEYMFYTWAEQNYRGY\nBoLA-201802,YYADYRNIYDTIFANTAYFEYMFYTWAEQNYRGY\nBoLA-202201,YHSEYEQIVDTSFVGTLYLLYEDYTRAALNYTGY\nBoLA-202501,YSAEYRNIYDTTFVYALYLWSWFYTWAAENYRGY\nBoLA-202601,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-202602,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-202603,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-203001,YYSEYRNIYDTNFVSNLYLWSWFYTWANENYEWY\nBoLA-203202,YYATYRENLGATFVDTLYIEYRDYTWAYLNYTWY\nBoLA-204301,YSEMYRERAGNTFVNTLYIWYRDYTWAVFNYLGY\nBoLA-204401,YYAMYEEKADTTFVDTLYIAYRDYTWAVFNYLGY\nBoLA-204402,YYAMYEEKADTTFVDTLYIWYRDYTWAVFNYLGY\nBoLA-204501,YYATYRENLDTTFVDTLYIEYRDYTWAEFNYLGY\nBoLA-204601,YSEMYRERAGNTFVNTLYIWYRDYTWAEQNYTWY\nBoLA-204701,YSEMYQERAGNTFVDTLYLWYMDYTWAEQNYTWY\nBoLA-204801,YYSEYEQIVDTSFVGTLYLLYMDYTRAAQNYRGY\nBoLA-205401,YYIMYQENSGATFANTLYFWYWFYTWANENYRGY\nBoLA-205501,YYAEYREISETTFVDSLYIAYRDYTWAYLNYRGY\nBoLA-205601,YYATYQENFDATFANTLYFLSTYYTWEAHNYRGY\nBoLA-205701,YYIMYREISETTFVDTLYIEYDFYTWEYLNYRGY\nBoLA-206001,YSAEYRNIYDTTFVYTLYLWSWFYTWANGNYEGY\nBoLA-206201,YYATYQEIQENTFANTLYIEYRDYTWAYFNYRWY\nBoLA-206901,YYSEYEQIVDTSFVNTLYLWYRDYTWEAENYRWY\nBoLA-207001,YYATYRENLDATFVNTLYLWYRDYTWAERNYRWY\nBoLA-207101,YYATYRENLGATFVDTLYIAYSDYTWAEFNYRGY\nBoLA-2:00501,YYAEYRNIYDTIFVDTLYIAYWFYTWAAWNYEWY\nBoLA-2:00601,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWY\nBoLA-2:00602,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWH\nBoLA-2:00801,YLIMYRENSETTFANTAYVEYMDYTWADWNYRWY\nBoLA-2:00802,YLIMYRENSETTFANTAYVEYMDYTWADWNYRGY\nBoLA-2:01201,YYATYRENFDTTFVDTLYIAYRDYTWAEHNYTWY\nBoLA-2:01601,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEGY\nBoLA-2:01602,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWY\nBoLA-2:01801,YYADYRNIYDTIFANTAYFEYMFYTWAEQNYRGY\nBoLA-2:01802,YYADYRNIYDTIFANTAYFEYMF
YTWAEQNYRGY\nBoLA-2:02201,YHSEYEQIVDTSFVGTLYLLYEDYTRAALNYTGY\nBoLA-2:02501,YSAEYRNIYDTTFVYALYLWSWFYTWAAENYRGY\nBoLA-2:02601,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-2:02602,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-2:02603,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-2:03001,YYSEYRNIYDTNFVSNLYLWSWFYTWANENYEWY\nBoLA-2:03202,YYATYRENLGATFVDTLYIEYRDYTWAYLNYTWY\nBoLA-2:04301,YSEMYRERAGNTFVNTLYIWYRDYTWAVFNYLGY\nBoLA-2:04401,YYAMYEEKADTTFVDTLYIAYRDYTWAVFNYLGY\nBoLA-2:04402,YYAMYEEKADTTFVDTLYIWYRDYTWAVFNYLGY\nBoLA-2:04501,YYATYRENLDTTFVDTLYIEYRDYTWAEFNYLGY\nBoLA-2:04601,YSEMYRERAGNTFVNTLYIWYRDYTWAEQNYTWY\nBoLA-2:04701,YSEMYQERAGNTFVDTLYLWYMDYTWAEQNYTWY\nBoLA-2:04801,YYSEYEQIVDTSFVGTLYLLYMDYTRAAQNYRGY\nBoLA-2:05401,YYIMYQENSGATFANTLYFWYWFYTWANENYRGY\nBoLA-2:05501,YYAEYREISETTFVDSLYIAYRDYTWAYLNYRGY\nBoLA-2:05601,YYATYQENFDATFANTLYFLSTYYTWEAHNYRGY\nBoLA-2:05701,YYIMYREISETTFVDTLYIEYDFYTWEYLNYRGY\nBoLA-2:06001,YSAEYRNIYDTTFVYTLYLWSWFYTWANGNYEGY\nBoLA-2:06201,YYATYQEIQENTFANTLYIEYRDYTWAYFNYRWY\nBoLA-2:06901,YYSEYEQIVDTSFVNTLYLWYRDYTWEAENYRWY\nBoLA-2:07001,YYATYRENLDATFVNTLYLWYRDYTWAERNYRWY\nBoLA-2:07101,YYATYRENLGATFVDTLYIAYSDYTWAEFNYRGY\nBoLA-300101,YSEMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-300102,YSSMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-300103,YSIMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-300201,YLEMYQEKAGNFFVSNLYLLSMFYSMAEQNYRWY\nBoLA-300401,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-300402,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-300403,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-301001,YYSEYRNIYDTTFVDTLYLEYEYYSVAEFNYRGY\nBoLA-301101,YSEMYQEKAGTTFANIAYFWYMYYTWAEQNYTWY\nBoLA-301701,YSEMYRERAGNIFVSNLYFWYEYYTWAAQNYRWY\nBoLA-301702,YSEMYRERAGNIFVSNLYFWYMYYTWAAQNYRWY\nBoLA-301703,YSEMYRERAGNIFVSNLYFWYMYYTWAEQNYRWY\nBoLA-302701,YSEMYRNNAGNSFVGTLYLWSMYYTWEYQNYEWH\nBoLA-302702,YSEMYRNNAGNSFVGTLYLWSMYYTWEYQNYEWH\nBoLA-303501,YYNMYQENAGNTFVGTLYLWSEFYTWAAHNYTWY\nBoLA-303601,YYAMYRNNADATFVNTLYFLYEYYTVADHNYRWY\nBoLA-303701,YSEMYRNNAGNSFVGTLYLLYMDYSRAVQNYRWY\nBoLA-303801,YNEMYRNNAGNDSVGTLYLWYMYYSMAVQNYTWY\nBoLA-305001,YSEMYRNNAGNTFGSNLYFLYTYYTWAEWNYTWH\nBoLA-305002,YSEMYRNNAGNTFGSNLYFWYMYYTWAEQNYTWH\nBoLA-305101,YSEMYRERAGNTFVNTLYIWYRDYTWAAENYTWY\nBoLA-305201,YYSMYRENSDTGFVDTLYLLYTYYSVAVQNYRWY\nBoLA-305301,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-305801,YSEMYRERAGNTFVGTLYLWYMDYSRAVQNYRWY\nBoLA-305901,YSEMYRNNAGNSFVGTLYLWSMFYTWEYQNYRWH\nBoLA-306501,YSEMYQEKAGTSSVGTLYLAYMFYSMAVQNYEWY\nBoLA-306601,YYEMYQEKADTTFVDTLYLLYTYYSMAEFNYTWY\nBoLA-306602,YYEMYQEKADTTFVDTLYLLYTFYSMAEFNYTWY\nBoLA-306801,YSIVYQNNAGTTFANTLYLLYMYYTWAAHNYEWY\nBoLA-307301,YYIIYQEISDTSFVSNLYLWYTYYSMAVQNYEWY\nBoLA-3:00101,YSEMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-3:00102,YSSMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-3:00103,YSIMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-3:00201,YLEMYQEKAGNFFVSNLYLLSMFYSMAEQNYRWY\nBoLA-3:00401,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-3:00402,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-3:00403,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-3:01001,YYSEYRNIYDTTFVDTLYLEYEYYSVAEFNYRGY\nBoLA-3:01101,YSEMYQEKAGTTFANIAYFWYMYYTWAEQNYTWY\nBoLA-3:01701,YSEMYRERAGNIFVSNLYFWYEYYTWAAQNYRWY\nBoLA-3:01702,YSEMYRERAGNIFVSNLYFWYMYYTWAAQNYRWY\nBoLA-3:01703,YSEMYRERAGNIFVSNLYFWYMYYTWAEQNYRWY\nBoLA-3:02701,YSEMYRNNAGNSFVGTLYLWSMYYTWEYQNYEWH\nBoLA-3:02702,YSEMYRNNAGNSFVGTLYLWSMYYTWEYQNYEWH\nBoLA-3:03501,YYNMYQENAGNTFVGTLYLWSEFYTWAAHNYTWY\nBoLA-3:03601,YYAMYRNNADATFVNTLYFLYEYYTVADHNYRWY\nBoLA-3:03701,YSEMYRNNAGNSFVGTLYLLYMDYSRAVQNYRWY\nBoLA-3:03801,YNEMYRNNAGNDSVGTLYLWYMYYSMAVQNYTWY\nBoLA-3:05001,YSEMYRNNAGNTFGSNLYFLYTYYTWAEWNYTWH\nBoLA-3:05002,YSEMYRNNAGNTFGSNLYFWYMYYTWAEQNY
TWH\nBoLA-3:05101,YSEMYRERAGNTFVNTLYIWYRDYTWAAENYTWY\nBoLA-3:05201,YYSMYRENSDTGFVDTLYLLYTYYSVAVQNYRWY\nBoLA-3:05301,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-3:05801,YSEMYRERAGNTFVGTLYLWYMDYSRAVQNYRWY\nBoLA-3:05901,YSEMYRNNAGNSFVGTLYLWSMFYTWEYQNYRWH\nBoLA-3:06501,YSEMYQEKAGTSSVGTLYLAYMFYSMAVQNYEWY\nBoLA-3:06601,YYEMYQEKADTTFVDTLYLLYTYYSMAEFNYTWY\nBoLA-3:06602,YYEMYQEKADTTFVDTLYLLYTFYSMAEFNYTWY\nBoLA-3:06801,YSIVYQNNAGTTFANTLYLLYMYYTWAAHNYEWY\nBoLA-3:07301,YYIIYQEISDTSFVSNLYLWYTYYSMAVQNYEWY\nBoLA-402401,YSIAYEQIVDTTFANTAYIAYSDYTWEYLNYTWY\nBoLA-402402,YSIAYEEIVDTTFANTAYLPYSDYTWTYLNYTWY\nBoLA-406301,YYSTYRENFETTFVNTLYILYTFYSRAALNYRGY\nBoLA-4:02401,YSIAYEQIVDTTFANTAYIAYSDYTWEYLNYTWY\nBoLA-4:02402,YSIAYEEIVDTTFANTAYLPYSDYTWTYLNYTWY\nBoLA-4:06301,YYSTYRENFETTFVNTLYILYTFYSRAALNYRGY\nBoLA-500301,YLIVYEERADHFFRGALYFEYEFYSWASYNYEWY\nBoLA-503901,YYIVYQEKADTFFLGTLYLWCWFYTWANENYEWY\nBoLA-506401,YYIVYQEKADHTFANTLYLWHWFYTWANENYEWY\nBoLA-507201,YYIVYQEKADHFFLGTLYLWYWFYSWAVQNYTWY\nBoLA-5:00301,YLIVYEERADHFFRGALYFEYEFYSWASYNYEWY\nBoLA-5:03901,YYIVYQEKADTFFLGTLYLWCWFYTWANENYEWY\nBoLA-5:06401,YYIVYQEKADHTFANTLYLWHWFYTWANENYEWY\nBoLA-5:07201,YYIVYQEKADHFFLGTLYLWYWFYSWAVQNYTWY\nBoLA-601301,YHTTYREISENWYEANLYLEYEYYSMAAFNYTWY\nBoLA-601302,YHTTYREISENWYEANLYLLYEYYSMAAFNYTWY\nBoLA-601401,YHTKYREISENWYEANLYYRYTFYTWAEFNYRGY\nBoLA-601402,YHTKYREISENKYEAILYYRYTFYTWAEFNYRWY\nBoLA-601501,YYTKYREISENWYEANLYLLYTFYSMADQNYRGY\nBoLA-601502,YYTKYREISENWYEANLYLQFTFYSMADQNYRGY\nBoLA-603401,YHTKYREISENVYGSNLYLLYTFYSMADRNYRGY\nBoLA-604001,YSEMYEERAGIVFVNTLYLWCWFYSMAAGKYTWY\nBoLA-604101,YHTKYREISENWYEATLYLEYEYYSMAAFNYRSY\nBoLA-6:01301,YHTTYREISENWYEANLYLEYEYYSMAAFNYTWY\nBoLA-6:01302,YHTTYREISENWYEANLYLLYEYYSMAAFNYTWY\nBoLA-6:01401,YHTKYREISENWYEANLYYRYTFYTWAEFNYRGY\nBoLA-6:01402,YHTKYREISENKYEAILYYRYTFYTWAEFNYRWY\nBoLA-6:01501,YYTKYREISENWYEANLYLLYTFYSMADQNYRGY\nBoLA-6:01502,YYTKYREISENWYEANLYLQFTFYSMADQNYRGY\nBoLA-6:03401,YHTKYREISENVYGSNLYLLYTFYSMADRNYRGY\nBoLA-6:04001,YSEMYEERAGIVFVNTLYLWCWFYSMAAGKYTWY\nBoLA-6:04101,YHTKYREISENWYEATLYLEYEYYSMAAFNYRSY\nBoLA-AW10,YSEMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-D18.4,YYSEYREISENVYESNLYIAYSDYTWEYLNYRWY\nBoLA-HD6,YHTTYREISENWYEANLYLEYEYYSMAAFNYTWY\nBoLA-JSP.1,YLEMYQEKAGNFFVSNLYLLSMFYSMAEQNYRWY\nBoLA-N:00101,YSEMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-N:00102,YSSMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-N:00103,YSIMYRERAGNFFVSNLYLWSMFYSMAEQNYRWY\nBoLA-N:00201,YLEMYQEKAGNFFVSNLYLLSMFYSMAEQNYRWY\nBoLA-N:00301,YLIVYEERADHFFRGALYFEYEFYSWASYNYEWY\nBoLA-N:00401,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-N:00402,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-N:00501,YYAEYRNIYDTIFVDTLYIAYWFYTWAAWNYEWY\nBoLA-N:00601,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWY\nBoLA-N:00602,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWH\nBoLA-N:00801,YLIMYRENSETTFANTAYVEYMDYTWADWNYRWY\nBoLA-N:00802,YLIMYRENSETTFANTAYVEYMDYTWADWNYRGY\nBoLA-N:00901,YYSMYREISENVYGSNLYLLYRDYTWEYLNYRWY\nBoLA-N:00902,YYSEYREISENVYESNLYLLYRDYTWEYLNYRWY\nBoLA-N:01001,YYSEYRNIYDTTFVDTLYLEYEYYSVAEFNYRGY\nBoLA-N:01101,YSEMYQEKAGTTFANIAYFWYMYYTWAEQNYTWY\nBoLA-N:01201,YYATYRENFDTTFVDTLYIAYRDYTWAEHNYTWY\nBoLA-N:01301,YHTTYREISENWYEANLYLEYEYYSMAAFNYTWY\nBoLA-N:01302,YHTTYREISENWYEANLYLLYEYYSMAAFNYTWY\nBoLA-N:01401,YHTKYREISENWYEANLYYRYTFYTWAEFNYRGY\nBoLA-N:01402,YHTKYREISENKYEAILYYRYTFYTWAEFNYRWY\nBoLA-N:01501,YYTKYREISENWYEANLYLLYTFYSMADQNYRGY\nBoLA-N:01502,YYTKYREISENWYEANLYLQFTFYSMADQNYRGY\nBoLA-N:01601,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEGY\nBoLA-N:01602,YSAEYRNIYDTTFVYTLYLWSMFYTWANENYEWY\nBoLA-N:01701,YSEMYRERAGNIFVSNLYFWYEYYTWAAQNYRWY\nBoLA-N:01702,YSEMYRERAGNIFVSNLYFWYMYYTWAAQNYRWY\n
BoLA-N:01801,YYADYRNIYDTIFANTAYFEYMFYTWAEQNYRGY\nBoLA-N:01802,YYADYRNIYDTIFANTAYFEYMFYTWAEQNYRGY\nBoLA-N:01901,YHTKYREISENVYGSNLYYDYDYYTWAVFNYRGY\nBoLA-N:02001,YHTKYREISENVYGSNLYFLYMDYTWAVFNYRGY\nBoLA-N:02101,YYTKYREISENVYGSNLYFQFRYYTWADFNYEGY\nBoLA-N:02201,YHSEYEQIVDTSFVGTLYLLYEDYTRAALNYTGY\nBoLA-N:02301,YYSEYREISENVYESNLYIAYSDYTWEYLNYRWY\nBoLA-N:02401,YSIAYEQIVDTTFANTAYIAYSDYTWEYLNYTWY\nBoLA-N:02402,YSIAYEEIVDTTFANTAYLPYSDYTWTYLNYTWY\nBoLA-N:02501,YSAEYRNIYDTTFVYALYLWSWFYTWAAENYRGY\nBoLA-N:02601,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-N:02602,YYAEYREISETTFVDTLYIEYEYYTWAYLNYRGY\nBoLA-N:02701,YSEMYRNNAGNSFVGTLYLWSMYYTWEYQNYEWH\nBoLA-N:02702,YSEMYRNNAGNSFVGTLYLWSMYYTWEYQNYEWH\nBoLA-N:02801,YYTKYREISEKLYENTLYLQFRYYTWADFNYEWY\nBoLA-N:02901,YYTRYREISENLYKNTAYITFMYYTWANENYRGY\nBoLA-N:03001,YYSEYRNIYDTNFVSNLYLWSWFYTWANENYEWY\nBoLA-N:03101,YYTKYDEISENLYKNTLYIAFRDYTWAYLNYTWY\nBoLA-N:03401,YHTKYREISENVYGSNLYLLYTFYSMADRNYRGY\nBoLA-N:03501,YYNMYQENAGNTFVGTLYLWSEFYTWAAHNYTWY\nBoLA-N:03601,YYAMYRNNADATFVNTLYFLYEYYTVADHNYRWY\nBoLA-N:03701,YSEMYRNNAGNSFVGTLYLLYMDYSRAVQNYRWY\nBoLA-N:03801,YNEMYRNNAGNDSVGTLYLWYMYYSMAVQNYTWY\nBoLA-N:03901,YYIVYQEKADTFFLGTLYLWCWFYTWANENYEWY\nBoLA-N:04001,YSEMYEERAGIVFVNTLYLWCWFYSMAAGKYTWY\nBoLA-N:04101,YHTKYREISENWYEATLYLEYEYYSMAAFNYRSY\nBoLA-N:04201,YHTKYDEISENLYKDTLYIAYRDYTWEYLNYRGY\nBoLA-N:04301,YSEMYRERAGNTFVNTLYIWYRDYTWAVFNYLGY\nBoLA-N:04401,YYAMYEEKADTTFVDTLYIAYRDYTWAVFNYLGY\nBoLA-N:04501,YYATYRENLDTTFVDTLYIEYRDYTWAEFNYLGY\nBoLA-N:04601,YSEMYRERAGNTFVNTLYIWYRDYTWAEQNYTWY\nBoLA-N:04701,YSEMYQERAGNTFVDTLYLWYMDYTWAEQNYTWY\nBoLA-N:04801,YYSEYEQIVDTSFVGTLYLLYMDYTRAAQNYRGY\nBoLA-N:04901,YYAEYREISDTSFVGTLYIEYEYYTWAYLNYEGY\nBoLA-N:05001,YSEMYRNNAGNTFGSNLYFLYTYYTWAEWNYTWH\nBoLA-N:05101,YSEMYRERAGNTFVNTLYIWYRDYTWAAENYTWY\nBoLA-N:05201,YYSMYRENSDTGFVDTLYLLYTYYSVAVQNYRWY\nBoLA-N:05301,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nBoLA-N:05401,YYIMYQENSGATFANTLYFWYWFYTWANENYRGY\nBoLA-N:05501,YYAEYREISETTFVDSLYIAYRDYTWAYLNYRGY\nBoLA-N:05601,YYATYQENFDATFANTLYFLSTYYTWEAHNYRGY\nBoLA-T2C,YYIIYRNISDTSFVSNLYLLYTYYSMAVQNYEWH\nBoLA-T2a,YYATYRENFDTTFVDTLYIAYRDYTWAEHNYTWY\nBoLA-T2b,YHTKYREISENWYEATLYLEYEYYSMAAFNYRSY\nBoLA-T2c,YYIIYRNISDTSFVSNLYLLYTYYSMAVQNYEWH\nBoLA-T5,YYSEYREISENVYESNLYLLYRDYTWEYLNYRWY\nBoLA-T7,YLAMYRNNANTTFVNNLYVEHMYYSMAEQNYTWY\nBoLA-amani.1,YYATYRENLDATFVNTAYIAYMDYTWEYQNYEWY\nBoLA-gb1.7,YSEMYRNNAGNSFVNTLYLWSMYYTWAYQNYEWY\nChi-B0401,YRTYYGQIGLNINENIRRVWFRSYTWEEWNYTWY\nChi-B1201,YRDYYGQIGGNIDENILRVWYYMYTWGYLQYTWY\nChi-B1501,YSDAYSETSRTIDDGTLRVLYSDYTWGYLQYTWY\nDLA-8803401,YYAMYGEKVETLYVDTLYITYSDYTRADLNYTWY\nDLA-8850101,YYAMYPQTIETTFVDTLYRTYRDYTWAVWNYTWY\nDLA-8850801,YYATYGEKVETVYVDTLYITYRDYTWAVWNYTWY\nEqca-100101,YKSMYEETAGHTFGNIAYFWSSFYTWAEHNYRWY\nEqca-1600101,YYTMYRESVGHTFVNTLYLLYFYYTWAAFNYRSY\nEqca-16:00101,YYTMYRESVGHTFVNTLYLLYFYYTWAAFNYRSY\nEqca-1:00101,YKSMYEETAGHTFGNIAYFWSSFYTWAEHNYRWY\nGogo-B0101,YDTMYRETSAQTDENIAYIRFSSYTWAELAYTWY\nH-2-Db,YESYYREKAGQWFVSNLYLQSLFYTWSAYAYEWY\nH-2-Dd,YVEYYRERAGNSFVDTAYLWAWFYTWAADAYEWY\nH-2-Dq,YESYYRIIADNWFVSTAYIRYEFYTWGAYAYEWY\nH-2-Kb,YVEYYREKAGNSFVDTLYIVSQYYTWAELAYTWY\nH-2-Kd,YVAFYEQRASDWFVSTAYFRFQFYTWADYAYEWY\nH-2-Kk,YHSYYRNIAGNIFVNTAYFRYEYYTWADDAYTWY\nH-2-Kq,YHSYYRNIADNSSVDTLYIRYEVYTWAARAYAWH\nH-2-Ld,YESYYRIIAGQWFVNTLYLWYEFYTWAAYAYEWY\nH-2-Lq,YESYYRIIAGQWFVNTLYIRYEYYTWAAYAYEWY\nH-2-Qa1,YHIMYREKADMNFVNTLYLWYCEYSSVEQAYPWY\nH-2-Qa2,YHSMYREIAGHSFGSTAYLWYLFYTWAIDAYTSY\nH2-Db,YESYYREKAGQWFVSNLYLQSLFYTWSAYAYEWY\nH2-Dd,YVEYYRERAGNSFVDTAYLWAWFYTWAADAYEWY\nH2-Dq,YESYYRIIADNWFVSTAYIRYEFYTWGAYAYEWY\nH2-Kb,YVEYYREKAGNSFVDTLYIVSQYYTWAELAYTWY\nH2-Kd,YVAFYEQR
ASDWFVSTAYFRFQFYTWADYAYEWY\nH2-Kk,YHSYYRNIAGNIFVNTAYFRYEYYTWADDAYTWY\nH2-Kq,YHSYYRNIADNSSVDTLYIRYEVYTWAARAYAWH\nH2-Ld,YESYYRIIAGQWFVNTLYLWYEFYTWAAYAYEWY\nH2-Lq,YESYYRIIAGQWFVNTLYIRYEYYTWAAYAYEWY\nH2-Qa1,YHIMYREKADMNFVNTLYLWYCEYSSVEQAYPWY\nH2-Qa2,YHSMYREIAGHSFGSTAYLWYLFYTWAIDAYTSY\nHLA-A0101,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0102,YSAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0103,YFAMYQENMAHTDANTLYIMYRDYTWVARVYRGY\nHLA-A0104,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0106,YFAMYQENMAHTDANTLYIIYRDYTWVALAYRGY\nHLA-A0107,YFAMYQENVAHTDENTLYIIYRDYTWVARVYRGY\nHLA-A0108,YFAMYQENMAHTDANTLYIIYRDYTWVARVYWGY\nHLA-A0109,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0110,YFAMYQENMAHTDANTLYIIYRDYTWARRVYRGY\nHLA-A0111,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0112,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A0113,YFAMYQENMAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A0114,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTGY\nHLA-A0115,YFAMYQENMAHTDANTLYIIYRDYTWVARVYGGT\nHLA-A0117,YFAMYQENMAQTDANTLYIIYRDYTWVARVYRGY\nHLA-A0118,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0119,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A0120,YSAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A0121,YFAMYQENMAHTDANTLYIIYRDYTWAVRVYRGY\nHLA-A0122,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0123,YFAMYQENVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0124,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0125,YFAMYQENMAHTDANTLYIIYRDYTWVAQVYRGY\nHLA-A0126,YFAMYQENMAHTDANTLYIIYRDYTWAARVYRGY\nHLA-A01:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:02,YSAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:03,YFAMYQENMAHTDANTLYIMYRDYTWVARVYRGY\nHLA-A01:04,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:06,YFAMYQENMAHTDANTLYIIYRDYTWVALAYRGY\nHLA-A01:07,YFAMYQENVAHTDENTLYIIYRDYTWVARVYRGY\nHLA-A01:08,YFAMYQENMAHTDANTLYIIYRDYTWVARVYWGY\nHLA-A01:09,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:10,YFAMYQENMAHTDANTLYIIYRDYTWARRVYRGY\nHLA-A01:100,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:101,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:102,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:103,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:104,YFAMYQENMAHTHANTLYIIYRDYTWVARVYRGY\nHLA-A01:105,YFAMYQENIAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:106,YFAMYQENMAHTDANTLYIIYRDYSWVARVYRGY\nHLA-A01:107,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:108,YFAMYQENMAHTNANTLYIIYRDYTWVARVYRGY\nHLA-A01:109,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:110,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:111,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:112,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:113,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:114,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:115,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:116,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:117,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:118,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:119,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:12,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A01:120,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:121,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:122,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:124,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:125,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:126,YFAMYQENMAHTDANTLYIIYRDYTWVVRVYRGY\nHLA-A01:127,YFAMYQENMAHTDANTLYIIYRDYTWVAQAYRGY\nHLA-A01:128,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:129,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:13,YFAMYQENMAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A01:130,YFAMYQENMAHTDANTLYVRCRDYTWVARVYRGY\nHLA-A01:131,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:132,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:133,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:134,YFAMYQENMAHTHVNTLYIIYRDYTWVARVYRGY\nHLA-A01:135,YFAMYQEN
MAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:136,YFAMYQENMAHTDANTLYIIYRDYTWAAQAYRGY\nHLA-A01:137,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:138,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:139,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:14,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTGY\nHLA-A01:140,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:141,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:142,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:143,YTAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:144,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:145,YFAMYQENMAHTDANTLYIIYQDYTWVARVYRGY\nHLA-A01:146,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:148,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:149,YFAMYQENMAHTDANTLYIIYRDYTWVARVYGGY\nHLA-A01:150,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:151,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:152,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:153,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:154,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:155,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:156,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:157,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:158,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:159,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:161,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:163,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:164,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:165,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:166,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:167,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:168,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:169,YFAMCQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:17,YFAMYQENMAQTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:170,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:171,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:172,YFAMYQENMAHTDANTQYIIYRDYTWVARVYRGY\nHLA-A01:173,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:174,YFAMYQENMAHTDANTLYIIYRDHTWVARVYRGY\nHLA-A01:175,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:176,YFAMYQENVAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A01:177,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:180,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:181,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:182,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:183,YFAMYQENMAHTDANILYIIYRDYTWVARVYRGY\nHLA-A01:184,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:185,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:187,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:188,YSAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:189,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:19,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A01:190,YSAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:191,YFAMYQEKVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:192,YFAMYQENMAHTDANTLYIMYRDYTWAARVYRGY\nHLA-A01:193,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:194,YFAMYQENMAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A01:195,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:196,YFAMYQENMAHTDANTLYIIYRDYTWVERVYRGY\nHLA-A01:197,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:198,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:199,YFSMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:20,YSAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A01:200,YFAMYQENMAHTDANTLYIIYRDYTWAVLAYTWY\nHLA-A01:201,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:202,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:203,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:204,YFAMYQENMTHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:205,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:206,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:207,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:209,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:21,YFAMYQENMAHTDANTLYIIYRDYTWAVRVYRGY\nHLA-A01:210,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:211,YFAMYQENMAHSDANT
LYIIYRDYTWVARVYRGY\nHLA-A01:212,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:213,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:214,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:215,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:216,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:217,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:218,YFAMYQENMAHTDANTLYIIYRGYTWVARVYRGY\nHLA-A01:219,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:220,HFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:221,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:222,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:223,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:224,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:225,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:226,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:227,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:229,YFAMYQENMAHTHVDTLYIIYRDYTWVARVYRGY\nHLA-A01:23,YFAMYQENVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:230,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:231,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:232,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:233,YFAMYQENMAHTDANTLYIIYRDYTWVARIYRGY\nHLA-A01:234,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:235,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:236,YFAMYQENMAHTDANTLYIIYHYYTWVARVYRGY\nHLA-A01:237,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:238,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:239,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:24,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:241,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:242,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:243,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:244,YFAMYQENMAHTDANTLYIIYRDYTWAVLAYTWY\nHLA-A01:245,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:246,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:249,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:25,YFAMYQENMAHTDANTLYIIYRDYTWVAQVYRGY\nHLA-A01:251,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:252,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:253,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:254,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:255,YFAMYQENMAHTDANTLYITYRDYTWVARVYRGY\nHLA-A01:256,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:257,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:259,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:26,YFAMYQENMAHTDANTLYIIYRDYTWAARVYRGY\nHLA-A01:260,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:261,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:262,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:263,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:264,YFAMYQENMAHTDANTLYIIYRDYTWFARVYRGY\nHLA-A01:265,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:266,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:267,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:268,YFAMYQENMAHTDANTLYIIYRDQTWVARVYRGY\nHLA-A01:270,YFAMYQENMAHTGANTLYIIYRDYTWVARVYRGY\nHLA-A01:271,YFAMYQENMAHTDANTLYIIYWDYTWVARVYRGY\nHLA-A01:272,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:273,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:274,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:275,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:276,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:277,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:278,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:279,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:28,YFAMYQENMAHTDVDTLYIIYRDYTWVARVYRGY\nHLA-A01:280,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:281,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:282,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:283,YFTMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:284,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRRY\nHLA-A01:286,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:288,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:289,YFAMYQENMAHTDENIAYIIYRDY
TWVARVYRGY\nHLA-A01:29,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:291,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:292,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:294,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:295,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:296,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:297,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:30,YFAMYQENMAHTDANTLYIIYHYYTWVARVYRGY\nHLA-A01:32,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:33,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:35,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:36,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:37,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:38,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:39,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:40,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:41,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:42,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:43,YYAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:44,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:45,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:46,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:47,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:48,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:49,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:50,YFAMYQENMAHTDANTLYIIYREYTWVARVYRGY\nHLA-A01:51,YFAMYRNNVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:54,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:55,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:58,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:59,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:60,YFAMYPENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:61,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:62,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:63,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:64,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:65,YFAMYQENMAHTDANTLYIIYRDYTWVARVCRGY\nHLA-A01:66,YFAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A01:67,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:68,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:69,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:70,YFAMYQENMAHTDANTLYIIYRDYTCVARVYRGY\nHLA-A01:71,YFAMYQDNMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:72,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRWY\nHLA-A01:73,YFAMYQENMAHTDANTLYLRYRDYTWVARVYRGY\nHLA-A01:74,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:75,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:76,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:77,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:78,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:79,YFAMYQENMAHTDANTLYIIYPDYTWVARVYRGY\nHLA-A01:80,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:81,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:82,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:83,YFAMYGEKVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:84,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:85,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:86,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:88,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:89,YFAMYQENMAHTDANTLYLIYRDYTWVARVYRGY\nHLA-A01:90,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:91,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:92,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:93,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:94,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:95,YFAMYQENMAHTDENIAYIIYRDYTWVARVYRGY\nHLA-A01:96,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:97,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A01:98,YFAMYQENMAHTDANTLYIIYRDYTWVARAYRGY\nHLA-A01:99,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A0201,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0202,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A0203,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A0204,YFAMYGEKVAHTHVDTLYVMYHYYTWAVLAYTWY\nHLA-A0205,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A0206,YYAMYGE
KVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0207,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A0208,YYAMYGENVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A0209,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0210,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A0211,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A0212,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A0213,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYTWY\nHLA-A0214,YYAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A0215,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A0216,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A0217,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A0218,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A0219,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A0220,YFAMYGENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0221,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0222,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A0224,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0225,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0226,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A0227,YFAMYGEKVAHTHVDTLYVRYHYYTWAAQAYTWY\nHLA-A0228,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0229,YFAMYGEQVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0230,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0231,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0233,YFAMYGEKVAHTHVDTLYVRSHYYTWAVLAYTWY\nHLA-A0234,YFAMYGEKVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0235,YFAMYGEKVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A0236,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTGY\nHLA-A0237,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A0238,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYRWY\nHLA-A0239,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A0240,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0241,YYAMYGEKVAHTHVDTLYVRYQYYTWAVLAYTWY\nHLA-A0242,YFSMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0243,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0244,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A0245,YFAMYQEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0246,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0247,YFAMYGEKVAHSHVDTLYLRYHYYTWAVWAYTWY\nHLA-A0248,YFAMYEEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A0249,YFAMYGEKVAHTHVDTLYVRYHYYTWAVRAYTWY\nHLA-A0250,YFAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A0251,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0252,YFAMYGEKVAHTHVDTLYVRYEHYTWAVLAYTWY\nHLA-A0254,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A0255,YFAMYRNNVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0256,YFAMYQENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0257,YYAMYGEKVAHTHVDTLYLMYHYYTWAVLAYTWY\nHLA-A0258,YFAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A0259,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0260,YFAMYGEKVAHTHVDTLYVRYHFYTWAVLAYTWY\nHLA-A0261,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0262,YFAMYGENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0263,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A0264,YFAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A0265,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A0266,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0267,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0268,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0269,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A0270,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0271,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0272,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0273,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A0274,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0275,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0276,YSAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0277,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0278,YYAMYQENVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A0279,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0280,YFAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A0281,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A0283,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0284,YYAMYGEKVAHTHVDTLYFRYHYYTWAVLAYTWY\nHLA-A0285,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0286,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0287,YFAMYGEKVAHTDENIAYVR
YHYYTWAVLAYTWY\nHLA-A0289,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0290,YFAMYGEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A0291,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0292,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0293,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0295,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0296,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0297,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A0299,YYAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A02:01,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:02,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:03,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:04,YFAMYGEKVAHTHVDTLYVMYHYYTWAVLAYTWY\nHLA-A02:05,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:06,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:07,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:08,YYAMYGENVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:09,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:10,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A02:101,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYRWY\nHLA-A02:102,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:103,YFAMYQENVAQTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:104,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A02:105,YFAMYGEKVAHTHVDTLYVRYEYYTWAVLAYTWY\nHLA-A02:106,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:107,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:108,YYAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A02:109,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:11,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:110,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A02:111,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:112,YFAMYGEKVAHTDENIAYVRCHYYTWAVLAYTWY\nHLA-A02:114,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A02:115,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:116,YFAMYGEKVAHTHLDTLYVRYHYYTWAVLAYTWY\nHLA-A02:117,YFAMYGEKVAHTHVDTLYVRYQDYTWAEWAYTWY\nHLA-A02:118,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:119,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:12,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A02:120,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:121,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:122,YYAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A02:123,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:124,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A02:126,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:127,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A02:128,YFAMYGENVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:129,YYAMYEEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A02:13,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYTWY\nHLA-A02:130,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:131,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A02:132,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:133,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:134,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:135,YFAMYGEKVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A02:136,YFAMYGEKVAHTDENIAYVRYHYYTWAVWAYTWY\nHLA-A02:137,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:138,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:139,YFAMYGEKVTHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:14,YYAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A02:140,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:141,YFVMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:142,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A02:143,YYAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:144,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:145,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:146,YFAMYGEKVAHTDANTLYVRYHYYTWAVLAYTWY\nHLA-A02:147,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A02:148,YFAMYGEKVAHTHVDTLYVRFHYYTWAEWAYTWY\nHLA-A02:149,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:150,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:151,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A02:152,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A02:153,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:154,YYAMYGE
KVAHTHVDTLYLRYHYYTWAVWAYRWY\nHLA-A02:155,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:156,YFAMYGEKVAHTHVDTLYIIYHYYTWAVLAYTWY\nHLA-A02:157,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:158,YFAMYGEKVAHAHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:159,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:16,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A02:160,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:161,YFAVYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:162,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:163,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:164,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:165,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:166,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:167,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A02:168,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:169,YYAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:17,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A02:170,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:171,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A02:172,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:173,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:174,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:175,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:176,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:177,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:178,YYAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A02:179,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:18,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:180,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:181,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:182,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:183,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:184,YFAMYGEKVAHTHEDTLYVRYHYYTWAVLAYTWY\nHLA-A02:185,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:186,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:187,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:188,YFAMYGEKVAHTHVDTLYVRYDSYTWAVLAYTWY\nHLA-A02:189,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:19,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A02:190,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:191,YFAMYGEKVAHTHVDTLYVRCHYYTWAVWAYTWY\nHLA-A02:192,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:193,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:194,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:195,YFAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:196,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:197,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:198,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:199,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:20,YFAMYGENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:200,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:201,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:202,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:203,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:204,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:205,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:206,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:207,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:208,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:209,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:21,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:210,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:211,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:212,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:213,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:214,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:215,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:216,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:217,YFAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:218,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:219,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:22,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A02:220,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:221,YFAMYGEKVAHTHVDTL
YVRYHYYTWAVLAYTWY\nHLA-A02:224,YFAMYGEKVAHTHVDTLYVGYHYYTWAVLAYTWY\nHLA-A02:228,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:229,YYAMYGEKVAHTHVDTLYLRYRYYTWAVWAYTWY\nHLA-A02:230,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:231,YFAMYGEKVAHTHVDTLYVRNHYYTWAVLAYTWY\nHLA-A02:232,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:233,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTRY\nHLA-A02:234,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:235,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:236,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:237,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:238,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:239,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:24,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:240,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:241,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:242,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A02:243,YTAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:244,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A02:245,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A02:246,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A02:247,YFAMYGEKVAHTDENTLYVRYHYYTWAVLAYTWY\nHLA-A02:248,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:249,YFAMYVEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:25,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:251,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:252,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:253,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:254,YFAMYGEKVAHTHVDTLYVRYNFYTWAVLAYTWY\nHLA-A02:255,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTGY\nHLA-A02:256,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:257,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:258,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:259,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:26,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A02:260,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:261,YFAMYGEKVAHTHMDTLYVRCHYYTWAVLAYTWY\nHLA-A02:262,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLVYTWY\nHLA-A02:263,YFAMYGEKVAHTHVDTLYVRYHYYTWSVLAYTWY\nHLA-A02:264,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:265,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:266,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:267,YFAMYGEKVAHTHVDTLYVRYHYYTWAAWAYTWY\nHLA-A02:268,YFAMYGEKVAHTHVDTLYVMFHYYTWAVLAYTWY\nHLA-A02:269,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:27,YFAMYGEKVAHTHVDTLYVRYHYYTWAAQAYTWY\nHLA-A02:270,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:271,YYAMYGEKVAHTHVDTLYLRYHYYTWAVQAYTWY\nHLA-A02:272,YFAMYGEKLAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:273,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:274,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:275,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:276,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:277,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:278,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:279,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A02:28,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:280,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYTWY\nHLA-A02:281,YFAMYGEKVAHTHVDILYVRYHYYTWAEWAYTWY\nHLA-A02:282,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:283,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:285,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:286,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:287,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:288,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:289,YFAMYGEKVAHTHVDTLYVRYQYYTWAVLAYTWY\nHLA-A02:29,YFAMYGEQVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:290,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:291,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:292,YFAMYGEKVSHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:294,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:295,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:296,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:297,YFAMYGEKVAHIDVDTLYVRYHYYTW
AVLAYTWY\nHLA-A02:298,YFAMYGEKVAHIDVDTLYVRYHDYTWAVLAYTWY\nHLA-A02:299,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:30,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:300,YYAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A02:302,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:303,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A02:304,YFAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A02:306,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:307,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:308,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:309,YFAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A02:31,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:310,YYSMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:311,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:312,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:313,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:315,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:316,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:317,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:318,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:319,YSAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:320,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:322,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:323,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:324,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:325,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:326,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:327,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:328,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:329,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:33,YFAMYGEKVAHTHVDTLYVRSHYYTWAVLAYTWY\nHLA-A02:330,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:331,YYAMYGEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:332,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWH\nHLA-A02:333,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:334,YFAMYGEKVAHTHVDTLYIMYHYYTWAVLAYTWY\nHLA-A02:335,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:336,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:337,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:338,YFAMYGEKVAHTHVDTLYIIYHYYTWAVLAYTWY\nHLA-A02:339,YFAMYGEKVAHTHVDTLYVRYDLYTWAVLAYTWY\nHLA-A02:34,YFAMYGEKVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:340,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:341,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:342,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:343,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:344,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:345,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:346,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:347,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:348,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:349,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:35,YFAMYGEKVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:351,YFAMYGEKVARTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:352,CFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:353,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:354,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYAWY\nHLA-A02:355,YYAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:357,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:358,YYAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:359,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:36,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTGY\nHLA-A02:360,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:361,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:362,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:363,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:364,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:365,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:367,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:368,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:369,YFAMYEEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:37,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A02:370,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:371,YFAMYGEKVAHTHVDTLYVRYHYYIWAVLAYTWY\n
HLA-A02:372,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:374,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:375,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:376,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYLWY\nHLA-A02:377,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:378,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:379,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:38,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYRWY\nHLA-A02:380,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:381,YFAMYGEKVAHTHVDSLYVRYHYYTWAVLAYTWY\nHLA-A02:382,YYAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A02:383,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:384,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:385,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYMWY\nHLA-A02:386,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:387,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:388,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:389,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:39,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A02:390,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:391,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:392,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:393,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A02:394,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:396,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:397,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:398,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A02:399,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:40,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:400,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:401,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:402,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:403,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLTYTWY\nHLA-A02:404,YYAMYGEKVAHTHVDTLYVRYHHYTWAVLAYTWY\nHLA-A02:405,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:406,YFAMYGEKVAHTHVDTLYVRYHDYTWAVLAYTWY\nHLA-A02:407,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:408,YFAMYGEKVAHTHVDTLYVRCHYYTWAALAYTWY\nHLA-A02:409,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:41,YYAMYGEKVAHTHVDTLYVRYQYYTWAVLAYTWY\nHLA-A02:410,YFAMYAEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:411,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:412,YFAMYGEKVAHTHVDTLYVRYHSYTWAEWAYTWY\nHLA-A02:413,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:414,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:415,YYAMYGENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:416,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:417,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A02:418,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:419,YYAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:42,YFSMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:420,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:421,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:422,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:423,YFAMYGEKVAHTHVDTLYVRYHHYTWAVLAYTWY\nHLA-A02:424,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:425,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:426,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:427,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:428,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:429,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:430,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:431,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:432,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTSY\nHLA-A02:433,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:434,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:435,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:436,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:437,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYEWY\nHLA-A02:438,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:44,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A02:441,HFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:442,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:443,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:4
44,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:445,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:446,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:447,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYRWY\nHLA-A02:448,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:449,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:45,YFAMYQEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:450,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:451,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:452,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:453,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A02:454,YYAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A02:455,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:456,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:457,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:458,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:459,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:46,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:460,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:461,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:462,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:463,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:464,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:465,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:466,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:467,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:469,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:47,YFAMYGEKVAHSHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:470,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:471,YYAMYGEKVVHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:472,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:473,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:474,YYAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:475,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:477,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:478,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:479,YFAMYGEKVAHSHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:48,YFAMYEEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:480,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:481,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:482,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:483,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:484,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:485,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:486,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:487,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A02:488,YFAMYGEKVAHTHVDTLYVRYHYCTWAVLAYTWY\nHLA-A02:489,YYAMYGEKVAHTHVDTLYLRYHYYTWAEWAYTWY\nHLA-A02:49,YFAMYGEKVAHTHVDTLYVRYHYYTWAVRAYTWY\nHLA-A02:491,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:492,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:493,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:494,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:495,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:496,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:497,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:498,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:499,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:50,YFAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A02:502,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:503,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:504,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:505,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:507,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:508,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:509,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:51,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:510,YFAMYGEKVAHTHVDTLYVRYHLYTWAVLAYTWY\nHLA-A02:511,YFAMYGEKVAHTHVDTLYVSYHYYTWAVLAYTWY\nHLA-A02:512,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:513,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:515,YFAMYGEKVAHTHMDTLYVRYHYYTWAVLAYTWY\nHLA-A02:517,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:518,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:519,YFAMYGE
KVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:52,YFAMYGEKVAHTHVDTLYVRYEHYTWAVLAYTWY\nHLA-A02:520,YFAMYGEKVAHTHVDTLYVRYYYYTWAVLAYTWY\nHLA-A02:521,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:522,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:523,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:524,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:526,YFAMYGEKVAHTHVDTLYVKYHYYTWAVLAYTWY\nHLA-A02:527,YYAMYGEKVAHTHVDTLYLRYRDYTWAVWAYTWY\nHLA-A02:528,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:529,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYMWY\nHLA-A02:530,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:531,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:532,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:533,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:534,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:535,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:536,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:537,YFAMYGEKVAHTHVDTLYVRYHYYTWDVLAYTWY\nHLA-A02:538,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:539,YFAMYGEKVAHTHVDTLYVRYHYYTLAVLAYTWY\nHLA-A02:54,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A02:541,YFAMYGEKVAHTHVDTLYVRCHYYTWAELAYTWY\nHLA-A02:542,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:543,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYRWY\nHLA-A02:544,YFAMYGEKVAHTHVDTLYVRCHYYTWAEWAYTWY\nHLA-A02:545,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:546,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:547,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLVYTWY\nHLA-A02:548,YFAMYGEKVAHTHVDTLYVRHHYYTWAVLAYTWY\nHLA-A02:549,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:55,YFAMYRNNVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:550,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:551,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:552,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:553,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:554,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:555,YFAMYGEKVAHTHVDTLYVRYNYYTWAVLAYTWY\nHLA-A02:556,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:557,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:558,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:559,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:56,YFAMYQENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:560,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A02:561,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:562,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:563,YFAMYGEKVAHTHVDTLYVRYHYYAWAVLAYTWY\nHLA-A02:564,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:565,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:566,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:567,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:568,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:569,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:57,YYAMYGEKVAHTHVDTLYLMYHYYTWAVLAYTWY\nHLA-A02:570,YFTMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:571,YFAMYEEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A02:572,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:573,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:574,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:575,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:576,YFAMYGEKVAHTHVDTLYVRYHYYTWVVLAYTWY\nHLA-A02:577,YYAMYGEKVAHTHGDTLYLRYHYYTWAVWAYTWY\nHLA-A02:578,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:579,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:58,YFAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A02:580,YFAMYGEKVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:581,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A02:582,YFAMYGEKVAHTHVDTLYVRYRDYTWAVWAYTWY\nHLA-A02:583,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:584,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:585,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:586,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:587,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:588,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:589,YFAMYGEKVAHIDVDT
LYVRYHYYTWAELAYTWY\nHLA-A02:59,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:590,YFAMYGEKVAHTHVDTLYVRYHYYTWAALAYTWY\nHLA-A02:591,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:592,YYAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A02:593,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:594,YFAMYGEKVAHTHVDTLYVRYNFYTWAVLAYTWY\nHLA-A02:595,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:596,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:597,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:598,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:599,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:60,YFAMYGEKVAHTHVDTLYVRYHFYTWAVLAYTWY\nHLA-A02:600,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:601,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:602,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:603,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:604,YFAMYGEKVAHTHVDTLYVRIHYYTWAVLAYTWY\nHLA-A02:606,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:607,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:609,YFAMYGENMAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:61,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:610,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:611,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:612,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:613,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:614,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:615,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:616,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:617,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A02:619,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A02:62,YFAMYGENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:620,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:621,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:623,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:624,YFAMYGEKVAHTHVDTLCVRYHYYTWAVLAYTWY\nHLA-A02:625,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:626,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:627,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:628,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A02:629,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:63,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:630,YYAMYGEKVAHTHVDTLYVRFHYYTWAVQAYTWY\nHLA-A02:631,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:632,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:633,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:634,YFAMYGENVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:635,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:636,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:637,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:638,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:639,YFAMYGEKVAHTHVDILYVRYHYYTWAVLAYTWY\nHLA-A02:64,YFAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A02:640,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:641,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:642,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:644,YFAMYRNNVAHTDANTLYVRYHYYTWAVLAYTWY\nHLA-A02:645,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:646,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:647,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:648,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:649,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:65,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A02:650,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:651,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A02:652,YFAMYGEKVAHTHVDTLNVRCHYYTWAVLAYTWY\nHLA-A02:653,YFAMYGEKVAHTHVDTLHVRYHYYTWAVLAYTWY\nHLA-A02:654,YFAMYGEKVAHTHVDTLYVRYHYYTCAVLAYTWY\nHLA-A02:655,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:656,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:657,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A02:658,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:659,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:66,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:660,YFAMYGEKVAHTHVDTLYVRYHYYTWA
VLAYTWY\nHLA-A02:661,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:662,YFAMYGEKVAHTHVDTLYVRYRDYTWAAQAYTWY\nHLA-A02:663,YFAMYGEKVAYTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:664,YFAMYGEKVAHTHVDTLYVMYHYYTWAVLAYTWY\nHLA-A02:665,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:666,YSAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:667,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:668,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:669,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:67,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:670,YYAMYGEKVAHTHVDTLHLRYHYYTWAVWAYTWY\nHLA-A02:671,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:673,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:674,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:676,YYAMYGEKVAHTHVDTLYLRYHSYTWAVWAYTWY\nHLA-A02:677,YFAMYGEKVDHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:678,YFAMYGEKVAHTHVDTLYVRCHSYTWAVLAYTWY\nHLA-A02:679,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:68,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:680,YFAMYGEKVAHTHVDTLYLMFHYYTWAVWAYTWY\nHLA-A02:681,YFAMYGEKVAHTHVDTLYVRYRYYTWAVLAYTWY\nHLA-A02:682,YFAMYGEKVAHTHVDTLYVRYHYYTWVARAYTWY\nHLA-A02:683,YFAMYGEKVAHTHVDTLYVRYHYYTWAVRAYTWY\nHLA-A02:684,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:685,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:686,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:687,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:688,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:689,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:69,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:690,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:692,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:693,YFAMYGEKVAHTHVDTLYVRYHYYTWAVFAYEWY\nHLA-A02:694,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:695,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:697,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:698,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:699,YFAMYGEKVAHTHVDTLYVRYHYYTWAGLAYTWY\nHLA-A02:70,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:700,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQVYTWY\nHLA-A02:701,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:702,YFAMYGEKVALTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:703,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:704,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:705,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:706,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:707,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:708,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLANTWY\nHLA-A02:709,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:71,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:711,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:712,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:713,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:714,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:716,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:717,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:718,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:719,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:72,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:720,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:721,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:722,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:723,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A02:724,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:725,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:726,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:727,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:728,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:729,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:73,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A02:730,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:731,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:732,YFAMYGEKVAHTHVYTLYVRYHYYTWAVLAYTWY\nHLA-A02:733,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nH
LA-A02:734,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:735,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:736,YFAMYGEKVAHTHVDTLYVWYHYYTWAVLAYTWY\nHLA-A02:737,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:738,YFAMYGEKVVHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:739,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:74,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:740,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:741,YFAMYRNKVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:742,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:743,YFAMYGEKVAHTHVDTLYVRYNYYTWAVLAYTWY\nHLA-A02:744,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:745,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:746,YFAMYWEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:747,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:749,YFAMYGEKVAHTDANTLYVRYHYYTWAVLAYTWY\nHLA-A02:75,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:750,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:751,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:752,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:753,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:754,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:755,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:756,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:757,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:758,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:759,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:76,YSAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:761,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:762,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:763,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:764,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:765,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:766,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:767,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:768,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:769,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:77,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:770,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:771,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:772,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:774,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:776,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:777,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:778,YYAMYGEKVAHNHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:779,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:78,YYAMYQENVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:780,YFAMYGEQVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:781,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:782,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A02:783,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:784,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:785,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:786,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:787,YFAMYGEKVVHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:79,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:790,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:794,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:795,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:798,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:799,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:80,YFAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A02:800,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:801,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:802,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:804,YFAMYGEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A02:808,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:809,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A02:81,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A02:810,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:811,YFAMYGEKVAHTHVDTLYVRYHYYTWAVFAYTWY\nHLA-A02:812,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:813,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:814,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:815,
YFAMYRNNVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:816,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:817,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A02:818,YYAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A02:819,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:820,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:821,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:822,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A02:823,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:824,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:825,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:84,YYAMYGEKVAHTHVDTLYFRYHYYTWAVLAYTWY\nHLA-A02:85,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:86,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:87,YFAMYGEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A02:89,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:90,YFAMYGEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:91,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:92,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:93,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:95,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:96,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:97,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A02:99,YYAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A0301,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0302,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A0303,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0304,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0305,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0306,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0307,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A0308,YFAMYQENVAHTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0309,YFAMYQENVAQTHVDTLYIIYRDYTWAELAYTWY\nHLA-A0310,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A0312,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0313,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0314,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0315,YFAMYQENVAQTDVDTLYIIFRDYTWAELAYTWY\nHLA-A0316,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0317,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0318,YFAMYQENVAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A0319,YFAMYQENVAQTDVDTLYIIFHYYTWAELAYTWY\nHLA-A0320,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0321,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0322,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0323,YFAMYGEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0324,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0325,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0326,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0327,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0328,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0329,YFAMYQENVVQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A0330,YFAMYEEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:01,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:02,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:04,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:05,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:06,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:07,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A03:08,YFAMYQENVAHTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:09,YFAMYQENVAQTHVDTLYIIYRDYTWAELAYTWY\nHLA-A03:10,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:100,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:101,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:102,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:103,YFAMYQENVAQTDVDTLYIIYQDYTWAELAYTWY\nHLA-A03:104,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWH\nHLA-A03:105,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYRWY\nHLA-A03:106,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:107,YFAMYQENMAHTDANTLYIIYRDYTWAELAYTWY\nHLA-A03:108,YFAMYQENVAHTHVDTLYIIYRDYTWAELAYTWY\nHLA-A03:109,YFAMYQENVAQTDVHTLYIIYRDYTWAELAYTWY\nHLA-A03:110,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:111,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:112,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:113,YFAMYQEKVAQTDVDTLYIIYRDYTWAVQAYTWY\
nHLA-A03:114,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:115,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:116,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:117,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:118,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:119,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:12,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:120,YFAMYQENVAQTDVDTLYIIYRDCTWAELAYTWY\nHLA-A03:121,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:122,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTGY\nHLA-A03:123,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:124,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:125,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:126,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:127,YFAMYQENVAQTDVDTLYIIYRDYTWAALAYTWY\nHLA-A03:128,YFAMYQENVAQTDLDTLYIIYRDYTWAELAYTWY\nHLA-A03:13,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:130,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:131,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:132,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:133,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A03:134,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:135,YFAMYQENVAQTDVDTLYIIYRDYTWAERVYRGY\nHLA-A03:136,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:137,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:138,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:139,YFAMYQENVAQTDVDTLYIIYRDYTWAKLAYTWY\nHLA-A03:14,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:140,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:141,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:142,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:143,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:144,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:145,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:146,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:147,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:148,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:149,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:15,YFAMYQENVAQTDVDTLYIIFRDYTWAELAYTWY\nHLA-A03:150,YFAMYQENVAQTDVDTLYIIYRDYTWAELVYTWY\nHLA-A03:151,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:152,YFAMYEEKVAHTDENIAYIIYRDYTWAELAYTWY\nHLA-A03:153,YFAMYQENVAQTDVDTLYIIYRDYTWAERVYTWY\nHLA-A03:154,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:155,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:156,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:157,YFAMYQEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:158,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:159,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:16,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:160,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:163,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:164,YFAMYQENMAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:165,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:166,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:167,YFAMYQENVAQTDVDTLYIIYRDYTWAEQAYTGY\nHLA-A03:169,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:17,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:170,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:171,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A03:172,YFAMYQEKVAHTHVDTLYIIYRDYTWAELAYTWY\nHLA-A03:173,YFAMYQENVAQTDEDTLYIIYRDYTWAELAYTWY\nHLA-A03:174,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:175,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:176,YFAMYQEKVAHTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:177,YFAMYQENVAQTDVDTLYIRYRDYTWAELAYTWY\nHLA-A03:179,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:18,YFAMYQENVAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A03:180,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:181,YFAMYQENVAQTDVDSLYIIYRDYTWAELAYTWY\nHLA-A03:182,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:183,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:184,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:185,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:1
86,YFAMYQENVAQTDVDTLYIIYEHYTWAELAYTWY\nHLA-A03:187,YFAMYQENVAQTDVDTLYIIYRDYTWVARVYTWY\nHLA-A03:188,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:189,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:19,YFAMYQENVAQTDVDTLYIIFHYYTWAELAYTWY\nHLA-A03:190,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:191,YFAMYQENVAQTDVDTLYIIYGDYTWAELAYTWY\nHLA-A03:193,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:195,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:196,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:198,YFAMYGEKVAHTHVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:199,YFAMYQENVAQSDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:20,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:201,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:202,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:203,YFAMYQENVAQSDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:204,YFAMYQENVAQTDVDTLYMVYRDYTWAELAYTWY\nHLA-A03:205,YTAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:206,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:207,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:208,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYEWY\nHLA-A03:209,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:210,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:211,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:212,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:213,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:214,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:215,YFAMYQENVAQTDVDTLYIMYRDYTWAELAYTWY\nHLA-A03:216,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:217,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:218,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:219,YFAMYQENVAQTDENIAYIIYRDYTWAELAYTWY\nHLA-A03:22,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:220,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:221,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:222,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:223,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:224,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:225,YFAMYQENVAQTDVDTLYIIYRDYTWAERAYTWY\nHLA-A03:226,YFAMYQENVAQTDVDTLYIIYPDYTWAELAYTWY\nHLA-A03:227,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:228,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:229,YFAMYQENVAQTDVDTLYIIYRDYTWAEQAYTWY\nHLA-A03:23,YFAMYGEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:230,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:231,YFAMYQENVAQTDVDTLYIIYRDYTWARLAYTWY\nHLA-A03:232,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:233,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:235,YFAMYQENVAQTDVDTLYNIYRDYTWAELAYTWY\nHLA-A03:236,YFAMYQENVAQTDVDTLYIIYGDYTWAVQAYTWY\nHLA-A03:237,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:238,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:239,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:24,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:240,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:241,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:242,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:243,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:244,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:245,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:246,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:247,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:248,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:249,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:25,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:250,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:251,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:252,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:253,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:254,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:255,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:256,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:257,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:258,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:259,YFAMYQ
ENVAQTYVDTLYIIYRDYTWAELAYTWY\nHLA-A03:26,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:260,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:261,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:263,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:264,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:265,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:267,YFAMYQENVAQTDVNTLYIIYRDYTWAELAYTWY\nHLA-A03:268,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:27,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:270,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:271,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:272,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:273,YFAMYEEKVAHTDENTLYIIYRDYTWAELAYTWY\nHLA-A03:274,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:276,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:277,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:278,YFAMYLQNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:28,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:280,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:281,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:282,YFAMYQENVAQTDVDTLYIIYQDYTWAELAYTWY\nHLA-A03:285,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:287,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:288,YFAMYQENVAQTDVDTLYMIYRDYTWAELAYTWY\nHLA-A03:289,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:29,YFAMYQENVVQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:290,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:291,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:292,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:293,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:294,YFAMYQENVAQTDVDTLYIIYRDYIWAELAYTWY\nHLA-A03:295,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:296,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:298,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:299,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:30,YFAMYEEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:300,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:301,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:302,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:303,YFAMYEENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:304,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:305,YFAMYQENVAQTDVDILYIIYRDYTWAELAYTWY\nHLA-A03:306,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:307,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:308,YFAMYQENVAQTDVDTLYIIYRDYTWAELAHTWY\nHLA-A03:309,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:31,YFAMYQENVAQTDVDTLYIIYRYYTWAVQAYTWY\nHLA-A03:310,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:311,YFAMYQENVAQTDVDTLYIIHRDYTWAELAYTWY\nHLA-A03:312,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:313,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:314,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:315,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:316,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:317,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:318,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:319,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:32,YFAMYQENVAHIDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:320,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:321,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:322,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:324,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:325,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:326,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYKWY\nHLA-A03:327,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:328,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:33,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:331,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:332,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A03:333,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:34,YFAMYQENVAPTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:35,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:37,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:38,YFAMYQENVAQTDVDTLYIIY
RDYTWAELAYTWY\nHLA-A03:39,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:40,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:41,YFAMYQENVAHTDANTLYIIYRDYTWAELAYTWY\nHLA-A03:42,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A03:43,YFAMYQENVAQTDVDTLYIIYEHYTWAELAYTWY\nHLA-A03:44,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:45,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:46,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:47,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:48,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:49,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:50,YFAMYQENVAQTDVDTLYIIYRDYTWAEWAYTWY\nHLA-A03:51,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:52,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:53,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:54,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:55,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:56,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:57,YFAMYQENVAQTDANTLYIIYRDYTWAELAYTWY\nHLA-A03:58,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:59,CFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:60,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:61,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:62,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:63,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:64,YFAMYQENVAQTDVDTLYIIYRDYTWADLAYTWY\nHLA-A03:65,YFAMYQENVAQTDVDTLYIIYRDYTWAEQAYTWY\nHLA-A03:66,YFAMYQENVAQTDVDTLYIIYRDYTWAERAYTWY\nHLA-A03:67,YFATYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:70,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:71,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:72,YSAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:73,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:74,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:75,YFAMYQENVAQTDVDTLYLMYRDYTWAELAYTWY\nHLA-A03:76,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:77,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:78,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:79,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:80,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:81,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:82,YFAMYQENVAQTDVDTLYIIYEHYTWAVQAYTWY\nHLA-A03:83,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:84,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:85,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:86,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:87,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:88,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:89,YFAMYGEKVAHTHVDTLYIIYRDYTWAELAYTWY\nHLA-A03:90,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A03:92,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:93,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:94,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:95,YFAMYQENVAQTDVDTLYVRYRDYTWAELAYTWY\nHLA-A03:96,YFDMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A03:97,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTGY\nHLA-A03:98,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYMWY\nHLA-A03:99,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A1101,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1102,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1103,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A1104,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTWY\nHLA-A1105,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1106,YYAMYQENVAQTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A1107,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1108,YYAMYQENVAQTDVDTLYIIYRDYTWAERAYRWY\nHLA-A1109,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1110,YYAMYRNNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1111,YYAMYLQNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1112,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1113,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1114,YYAMYQENVAQTDVDTLYIIYRDYTWARQAYRWY\nHLA-A1115,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1116,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1117,YYAMYQENMAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A1118,YYAMYQENVAHTHVDTLYIIYR
DYTWAAQAYRWY\nHLA-A1119,YYAMYQENVAHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1120,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A1121,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1122,YYAMYQENVAQTDVDTLYIIYPDYTWAAQAYRWY\nHLA-A1123,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1124,YYAMYQENVAQTDVDTLYIIYRDYTWAALAYRWY\nHLA-A1125,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYRWY\nHLA-A1126,YYAMYQENVAQTDVDTLYIMYRDYTWAAQAYRWY\nHLA-A1127,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTGY\nHLA-A1128,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1129,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1130,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A1131,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYRWY\nHLA-A1132,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:01,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:02,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:03,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A11:04,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTWY\nHLA-A11:05,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:06,YYAMYQENVAQTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:07,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:08,YYAMYQENVAQTDVDTLYIIYRDYTWAERAYRWY\nHLA-A11:09,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:10,YYAMYRNNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:100,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:101,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:102,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:103,YYAMYRENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:104,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:105,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:106,YYAMYQEKVVHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:107,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:108,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:11,YYAMYLQNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:110,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:111,YYAMYQENVAQTDEDTLYIIYRDYTWAAQAYRWY\nHLA-A11:112,YYAMYQENVAQTDVDTLYIIYRDYTWAAQVYRWY\nHLA-A11:113,YYAMYQENVAQTDVDTLYIIYEHYTWAAQAYRWY\nHLA-A11:114,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:116,YYAMYQENVAQTDVDTLYIIYQDYTWAAQAYRWY\nHLA-A11:117,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:118,YYAMYQENVAQTDVDTLYIMYRDYTWAAQAYRWY\nHLA-A11:119,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:12,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:120,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:121,YYAMYGEKVAHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:122,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:123,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:124,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:125,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:126,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:128,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:129,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:13,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:130,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTWY\nHLA-A11:131,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:132,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:133,YYSMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:134,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:135,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:136,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:138,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:139,YYAMYQENVAQTDVDTLYLMFRDYTWAAQAYRWY\nHLA-A11:14,YYAMYQENVAQTDVDTLYIIYRDYTWARQAYRWY\nHLA-A11:140,YYAMYQENVAQTDVDTLYIIYQDYTWAAQAYRWY\nHLA-A11:141,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:142,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:143,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:144,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:145,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:146,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:147,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:148,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:149,YYAMYQENVAQTDVDTLSIIYRDYTWAAQAYRWY\nHLA-A11:15,YYAMYQENVAQTDVDTLYII
YRDYTWAAQAYRWY\nHLA-A11:150,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:151,YYAMYQENVAQTDVDTLYIISRDYTWAAQAYRWY\nHLA-A11:152,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:153,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:154,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:155,YYAMYQENVAQTDVDTLYIIYRDYTWVAQAYRWY\nHLA-A11:156,YYAMYQDNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:157,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRRY\nHLA-A11:158,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYRWY\nHLA-A11:159,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:16,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:160,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:161,YYAMYQENVAQADVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:162,YYAMYQENVAQTDVDTLYIIYEHYTWAAQAYRWY\nHLA-A11:163,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:164,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:165,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:166,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:167,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:168,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:169,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:17,YYAMYQENMAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A11:171,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:172,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:173,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:174,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:175,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A11:176,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:177,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:178,YYAMYQENVAHTDENIAYIIYRDYTWAAQAYRWY\nHLA-A11:179,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:18,YYAMYQENVAHTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:181,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:183,YYAMYQENVAQTDVDTLYIIYRDYTWAVWAYRWY\nHLA-A11:184,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:185,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:186,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:187,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:188,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:189,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:19,YYAMYQENVAHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:190,YYAMYQENVAQTDENIAYIIYRDYTWAAQAYRWY\nHLA-A11:191,YYAMYQENVAQTDVDTLYIIYRDYTWAEWAYRWY\nHLA-A11:192,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:193,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:194,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:195,YYAMYQENVAQTDVDTLYIIYRDYTWGAQAYRWY\nHLA-A11:196,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:197,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:198,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:199,YFAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:20,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A11:200,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:201,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:202,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:203,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:204,YYAMYQENVAQTDVDTLYIIYRDYTWAAEAYRWY\nHLA-A11:205,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:206,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:207,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:209,YYAMYQENVAQTDVDTLYIIYRDYTWAVQAYTGY\nHLA-A11:211,YYAMYQENVAQTDVDTLYIIYRDYTWAARVYRWY\nHLA-A11:212,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:213,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:214,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:216,YYAMYQENVAQTDVDTLYIIYWDYTWAAQAYRWY\nHLA-A11:217,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:218,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:219,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:22,YYAMYQENVAQTDVDTLYIIYPDYTWAAQAYRWY\nHLA-A11:220,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWD\nHLA-A11:221,YHAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:222,YFAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:223,YYAMYQENVAQTDANTLYIIYRDYTWAAQ
AYRWY\nHLA-A11:224,YYAMYQEKVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:225,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:226,YYAMYQENVAQTDVDTLYIIYRDYTWVARVYRWY\nHLA-A11:227,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:228,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:229,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:23,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:230,YCAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:231,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:232,YYAMYQENVAQTDVDTLYIIYRDCTWAAQAYRWY\nHLA-A11:233,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:234,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:236,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:237,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:239,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:24,YYAMYQENVAQTDVDTLYIIYRDYTWAALAYRWY\nHLA-A11:240,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:241,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:242,YYAMYQENVAQTDVDTLYITYRDYTWAAQAYRWY\nHLA-A11:243,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:244,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:245,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:246,YYAMYQENVAQTDVDALYIIYRDYTWAAQAYRWY\nHLA-A11:247,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:248,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:249,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:25,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYRWY\nHLA-A11:250,YYAMYQENVAHIDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:252,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:253,YYAMYQENVAQTDVATLYIIYRDYTWAAQAYRWY\nHLA-A11:254,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:255,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:257,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:258,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:259,YYAMYQENVAQTDVDSLYIIYRDYTWAAQAYRWY\nHLA-A11:26,YYAMYQENVAQTDVDTLYIMYRDYTWAAQAYRWY\nHLA-A11:260,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:261,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:262,YYAEYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:263,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:264,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYGWY\nHLA-A11:265,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:266,YYAIYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:267,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:268,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:269,YYAMYQENVAQTDVDTLYIIYRDYTWAAWAYRWY\nHLA-A11:27,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTGY\nHLA-A11:270,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:271,YYAMYQENVAQTDANTLYIIYRDYTWVARVYRGY\nHLA-A11:273,YYAMYQENVAQTDVDTLYIIYRSYTWAAQAYRWY\nHLA-A11:274,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:275,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:276,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:277,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:278,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:279,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:280,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:281,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:282,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:283,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:284,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:285,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:286,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:288,YYAMYQENVAQTDVDTLYVRYRDYTWAAQAYRWY\nHLA-A11:289,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:29,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:290,YYAMYQENVAQTDVDTLYIIYRDYTWARRVYRWY\nHLA-A11:291,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:292,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:293,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:294,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:295,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:296,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:297,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHL
A-A11:298,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:299,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:30,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:300,YYAMYQENVAQTDVDTLYIIYRDYTWTAQAYRWY\nHLA-A11:301,YYAMYQENVTQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:302,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:303,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:304,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:305,YYAMYQENVAQNDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:306,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:307,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:308,YYAMYQENVAHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:309,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:31,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYRWY\nHLA-A11:311,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:312,YYAMYQENVAHTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:32,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:33,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:34,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:35,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A11:36,YYAMYQENVAQTDVDTLYIICRDYTWAAQAYRWY\nHLA-A11:37,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:38,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A11:39,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A11:40,YYAMYQENVAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A11:41,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:42,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:43,YTAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:44,YYAMYQENVAQTDVDTLYIIYRDYTWAARAYRWY\nHLA-A11:45,YYAMYQENVAQTDADTLYIIYRDYTWAAQAYRWY\nHLA-A11:46,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:47,YHAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:48,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:49,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:51,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:53,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:54,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:55,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:56,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:57,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:58,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:59,YYAMYQENVAQTDVDTLYIIYGDYTWAAQAYRWY\nHLA-A11:60,YYAMYQENVAQTDVDTLYIIYRDYTWAVQAYRWY\nHLA-A11:61,YYAMYQENAAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:62,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:63,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:64,YYAMYQENVAQTDVDTLHIIYRDYTWAAQAYRWY\nHLA-A11:65,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:66,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:67,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:68,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:70,YYAMYGENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:71,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:72,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:73,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:74,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:75,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:76,YYAMYQENVAQTDVDTLYIIYRDYTRAAQAYRWY\nHLA-A11:77,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:79,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:80,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:81,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:82,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:83,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:84,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:85,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:86,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:87,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:88,YSAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:89,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:90,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYEWY\nHLA-A11:91,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:92,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:93,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:94,YYAMYQENVAQTDVDTLYIIYRDYTWAARVYRGY\nHLA-A11:95,YYAMHQE
NVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:96,YYAMYQENVSQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:97,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A11:98,YYAMYQENVAHIDVDTLYIIYRDYTWAAQAYRWY\nHLA-A2301,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2302,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A2303,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2304,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A2305,CSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2306,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2307,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2309,YSAMYQENMAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2310,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYRGY\nHLA-A2312,YSAMYEEKVAHTHENIAYLMFHYYTWAVLAYTGY\nHLA-A2313,YSAMYEEKVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2314,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2315,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2316,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:01,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A23:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:04,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A23:05,CSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:06,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:09,YSAMYQENMAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYRGY\nHLA-A23:12,YSAMYEEKVAHTHENIAYLMFHYYTWAVLAYTGY\nHLA-A23:13,YSAMYEEKVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:14,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:15,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:16,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:17,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:18,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:24,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:25,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:27,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:28,YSAMYQEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:29,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:30,YSAMYEEKVAHTDENIAYLMFHCYTWAVLAYTGY\nHLA-A23:31,YSAMYEEKVAHTDENIAYLMFDDYTWAVLAYTGY\nHLA-A23:32,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:33,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:34,YSAMYEEKVAHTDENIAYLMFHYYTWAVVAYTGY\nHLA-A23:35,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:36,YSAMYEEKVAHTDESIAYLMFHYYTWAVLAYTGY\nHLA-A23:37,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:39,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:40,YSAMYEEKVAHTDANIAYLMFHYYTWAVLAYTGY\nHLA-A23:41,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:42,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:43,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTRY\nHLA-A23:44,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:45,YSAMYEEKVAHTDENIAYLMFQDYTWAVLAYTGY\nHLA-A23:46,YSAMYEEKVAHTDENIAYLMFEHYTWAVLAYTGY\nHLA-A23:47,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:48,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:49,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:50,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:51,YSAMYEENVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:52,YSAMYEEKVAHTDENIAYLMFDYYTWAVLAYTGY\nHLA-A23:53,YSAMYEEKVAHTDENIAYLMFRDYTWAVLAYTGY\nHLA-A23:54,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:55,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:56,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:57,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:58,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:59,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:60,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:61,YSAMYKEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:62,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:63,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:64,YFAM
YEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:65,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:66,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A23:67,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:68,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:70,YSAMYEEKVAHTDENIAYLMFRDYTWAVLAYTGY\nHLA-A23:71,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:72,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:73,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:74,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:75,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:76,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:77,YSAMCEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:78,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:79,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:80,YSAMYGEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:81,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:82,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:83,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A23:85,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:86,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:87,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:88,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A23:89,YSAMYEEKVAHTDENIAYLMFHYCTWAVLAYTGY\nHLA-A23:90,YSAMYEEKVAHTDENIAHLMFHYYTWAVLAYTGY\nHLA-A23:92,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2402,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2403,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A2404,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A2405,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2406,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A2407,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2408,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2409,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2410,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRWY\nHLA-A2411,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2413,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2414,YSAMYEEKVAHTDENIAYVRYHYYTWAVQAYTGY\nHLA-A2415,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A2417,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A2418,YSAMYEEKVAHTDENIAYLMFHYYTWAELAYTWY\nHLA-A2419,YSAMYEEKVAQTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A2420,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2421,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2422,YSAMYEEKVAHTDENIAYLMFHYYTWAVWVYTWY\nHLA-A2423,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A2424,YSAMYRNNVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2425,CSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2426,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2427,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2428,YSAMYEEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A2429,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2430,YSAMYEEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A2431,YSAMYEQKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2432,YSAMYEEKVAHTDESIAYLMFHYYTWAVQAYTGY\nHLA-A2433,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A2434,YSAMYEEKVAHIDENIAYLMFHYYTWAVQAYTGY\nHLA-A2435,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2437,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2438,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2439,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2440,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2441,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A2442,YSAMYGEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A2443,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2444,YSAMYEEKVAHTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A2446,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRGY\nHLA-A2447,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2449,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2450,YYAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2451,YSAMYEEKVAHTDENIAYLIYHYYTWAVQAYTGY\nHLA-A2452,YSAMYEEKVAHTDENIAYLRFHYYTWAVQAYTGY\nHLA-A2453,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A2454,YSAMYEEKVAHTDENIAYLMFHYYTWAVQPYTGY\nHLA-A2455,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYEGY\nHLA-A2456,YSAMYEEKVAHTDENIAYLMFHYYTWAEQAYTGY\nHLA-A2457,YSAMYEEKVAHTDENIAYIMYHYYTWAVQAYTGY\nHL
A-A2458,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2459,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYTGY\nHLA-A2461,YSAMYEEKVAHTDEKIAYLMFHYYTWAVQAYTGY\nHLA-A2462,YSAMYEEKVAHTDENIAYLMFQDYTWAVQAYTGY\nHLA-A2463,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2464,YSAMYEEKVAHTDENIAYLWIHYYTWAVQAYTGY\nHLA-A2465,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A2466,YSAMYEEKVAHTDENIAYLMFEHYTWAVQAYTGY\nHLA-A2467,YSAMYRNNVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2468,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2469,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2470,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2471,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2472,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2473,YSAMYEEKVAHTDENIAYLMFDYYTWAVQAYTGY\nHLA-A2474,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2475,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2476,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2477,YSAMYQEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2478,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2479,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A24:04,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A24:05,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:06,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A24:07,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:08,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRWY\nHLA-A24:100,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:101,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:102,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:103,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:104,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:105,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:106,YSAMYEEKVAHTDENIAYLMFDDYTWAVQAYTGY\nHLA-A24:107,YSAMYEEKVAHTDENIAYLMFHYYTWAVHAYTGY\nHLA-A24:108,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:109,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A24:110,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:111,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:112,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:113,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:114,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:115,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:116,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:117,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:118,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:119,YSAMYEEKVAHADENIAYLMFHYYTWAVQAYTGY\nHLA-A24:120,YSAMYEEKVAHTDENIAYIMFHYYTWAVQAYTGY\nHLA-A24:121,YSAMYEEKVAHTDENIAYLMFHSYTWAVQAYTGY\nHLA-A24:122,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:123,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:124,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:125,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A24:126,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:127,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:128,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:129,YSAMYQENMAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A24:13,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A24:130,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:131,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:133,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:134,YSAMYEEKVAHTDENIAYLMFHYYPWAVQAYTGY\nHLA-A24:135,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:136,YSAMYEEKVAHTDENIAYLMFHYYTWVVQAYTGY\nHLA-A24:137,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:138,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTWY\nHLA-A24:139,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:14,YSAMYEEKVAHTDENIAYVRYHYYTWAVQAYTGY\nHLA-A24:140,YSTMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:141,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:142,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:143,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYLGY\nHLA-A24:144,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:1
45,YSAMYEENVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:146,YSAMYEEKVAHTDENIAYLMFNYYTWAVQAYTGY\nHLA-A24:147,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:148,YSAMYEEKVAHTDENIAYLMFHYHTWAVQAYTGY\nHLA-A24:149,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:15,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A24:150,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:151,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:152,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:153,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:154,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:156,YSAMYEENVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:157,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:159,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:160,YSAMYEEKVAHTDENIAYLMFHYYTWAVQVYTGY\nHLA-A24:161,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:162,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:164,YSAMYEEKVAHTDENPLYLMFHYYTWAVQAYTGY\nHLA-A24:165,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:166,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:167,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTWY\nHLA-A24:168,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:169,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:17,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A24:170,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYMGY\nHLA-A24:171,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:172,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:173,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:174,YSAMYEEKVAHTDENIAYLMFDSYTWAVQAYTGY\nHLA-A24:175,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:176,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:177,YSAMYEEKVAHTDENIAYLMFHYYTWAEQAYTGY\nHLA-A24:178,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:179,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:18,YSAMYEEKVAHTDENIAYLMFHYYTWAELAYTWY\nHLA-A24:180,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:181,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:182,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:184,YSAMYEEKVAHTDENIACLMFHYYTWAVQAYTGY\nHLA-A24:186,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:187,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:188,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A24:189,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:19,YSAMYEEKVAQTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A24:190,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYTGY\nHLA-A24:191,YSAMYQENVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:192,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:193,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:194,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:195,YSAMYEEKVAHTDENIAYLMFHYYTSAVQAYTGY\nHLA-A24:196,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:197,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:198,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:199,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:200,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:201,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:202,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGN\nHLA-A24:203,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:204,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A24:205,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:206,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:207,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A24:208,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A24:209,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:210,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYRWY\nHLA-A24:212,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:213,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:214,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:215,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:216,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:217,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:218,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:219,YSAMYE
EKVAHTDENIAYLMFHYYTWAVQAYTRY\nHLA-A24:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVWVYTWY\nHLA-A24:220,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGH\nHLA-A24:221,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:223,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:224,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:225,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:226,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:227,YSAMYEEKVAHTDENIAYLMFHYYTWAVPAYTGY\nHLA-A24:228,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A24:229,YSAMYEEKVAHTDENIAYLMFHYYTWVAQAYTGY\nHLA-A24:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A24:230,YSAMYEEKVAHTDENIAYLMFHYYTWAMQAYTGY\nHLA-A24:231,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:233,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:234,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:235,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:236,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:237,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:238,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:239,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:24,YSAMYRNNVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A24:241,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:242,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:243,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:244,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:245,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:246,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:247,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:248,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:249,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:25,CSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:250,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:251,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:253,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:254,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:255,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:256,YSAMYEEKVAHTDENIAYLMFHFYTWAVQAYTGY\nHLA-A24:257,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:258,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:259,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:260,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:261,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:262,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:263,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:264,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:265,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:266,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:267,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:268,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:269,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:27,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:270,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:271,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:272,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:273,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:274,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:275,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:276,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:277,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:279,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:28,YSAMYEEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A24:280,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:281,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:282,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:283,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:284,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:285,YSAMYEEKVAHTDENIAYLMFHYYTWAARAYTGY\nHLA-A24:286,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:287,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:288,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:289,YSAMYEEKVAHTDENIAYLMFHYYTWAERAYTWY\nHLA-A24:29,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:290,YSAMYRNNVAQTDVDTL
YLMFHYYTWAVQAYTGY\nHLA-A24:291,YSAMYEEKVAHTDENIAYLRYHYYTWAVQAYTGY\nHLA-A24:292,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:293,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:295,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:296,YSAMYEEKVAHTDENIAYLMYRDYTWAVQAYTGY\nHLA-A24:297,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:298,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:299,YSAMYEEKVAHTDENIAYLMFHYYTWAVRVYTWY\nHLA-A24:30,YSAMYEEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A24:300,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRWY\nHLA-A24:301,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:302,YSAMYEEKVAHTDANIAYLMFHYYTWAVQAYTGY\nHLA-A24:304,YSAMYEEKVAHTDENIAYLKFHYYTWAVQAYTGY\nHLA-A24:305,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:306,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:307,YSAMYEEKVAHTVENIAYLMFHYYTWAVQAYTGY\nHLA-A24:308,CSAMYEEKVAHTDENIAYLMFEHYTWAVQAYTGY\nHLA-A24:309,YSAMYREKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A24:31,YSAMYEQKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:310,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:311,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:313,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:314,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:315,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYEWY\nHLA-A24:316,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:317,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:318,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:319,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:32,YSAMYEEKVAHTDESIAYLMFHYYTWAVQAYTGY\nHLA-A24:320,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:321,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:322,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:324,YSAMYEEKVAHTDENIAYVRYHYYTWAVQAYTGY\nHLA-A24:325,YSAMYEEKVAHTDENIAYLMFDYYTWAVQAYTGY\nHLA-A24:326,YSAMYKEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:327,YSAMYEEKVAHTDENIAYLMFHYYTWAVQTYTGY\nHLA-A24:328,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:329,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:33,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A24:330,YSAMYEEKVAHTDENIAYLMFHDYTWAVQAYTGY\nHLA-A24:331,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:332,YSSMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:333,YSAMYEEKVAHTDENIAYLMFEYYTWAVQAYTGY\nHLA-A24:334,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:335,YSAMYEEKVAHTDENIAYLMFHHYTWAVQAYTGY\nHLA-A24:336,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:337,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:338,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:339,YSAMYEEKVAQTDENIAYLMFHYYTWAVWAYTWY\nHLA-A24:34,YSAMYEEKVAHIDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:340,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYRWY\nHLA-A24:341,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:342,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:343,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:344,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:345,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:346,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:347,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:348,YSAMYEEKVAYTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:349,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:35,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:350,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:351,YSAMYEEKVAHTDEDIAYLMFHYYTWAVQAYTGY\nHLA-A24:352,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:353,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:354,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:355,YSAMYEEKVAHTDENIAYLMFHYYTWAELAYTGY\nHLA-A24:356,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:358,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:360,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:361,YSAMYEERVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:362,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:363,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:364,YSAMCEEKVAHTDENIAYLMFHYYTW
AVQAYTGY\nHLA-A24:365,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:366,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:367,YSAMYEEKVAHTDENIAYLMFHYYTWVEQAYTGY\nHLA-A24:368,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:369,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:37,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:371,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:372,YSAMYEEKVAHTDGNIAYLMFHYYTWAVQAYTGY\nHLA-A24:373,YSAMYEEKVAHTDENTLYLMFHYYTWAVQAYTGY\nHLA-A24:374,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYEWY\nHLA-A24:375,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:376,YSAMYEEKVAHTDENIAYLMFHYYTWAVKAYTGY\nHLA-A24:377,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:378,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:379,YSAMYEEKVAHTHVDIAYLMFHYYTWAVQAYTGY\nHLA-A24:38,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:380,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:381,YSAMYEEKVAHTDENIAYLMFHYYTWAEWAYTGY\nHLA-A24:382,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:383,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:384,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:385,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:386,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:387,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTWY\nHLA-A24:39,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:390,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:391,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:392,YSAMYRNNVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A24:393,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:394,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:395,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYTGY\nHLA-A24:397,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:398,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:399,YSAMYEEKVAHTDENELYLMFHYYTWAVQAYTGY\nHLA-A24:400,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:401,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:402,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:403,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:404,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:405,YSAMYEEKVAHTDENNAYLMFHYYTWAVQAYTGY\nHLA-A24:406,YSAMYEEKVAQTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A24:407,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:409,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:41,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A24:410,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:411,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:412,YSAMYEEKVAHTDENIAYLVFHYYTWAVQAYTGY\nHLA-A24:413,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:414,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:415,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:416,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:417,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:418,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:419,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:42,YSAMYGEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A24:420,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:421,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:422,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:423,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:424,YSAMYEEKVAHTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A24:43,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:431,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:432,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:44,YSAMYEEKVAHTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A24:46,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRGY\nHLA-A24:47,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:49,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:50,YYAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:51,YSAMYEEKVAHTDENIAYLIYHYYTWAVQAYTGY\nHLA-A24:52,YSAMYEEKVAHTDENIAYLRFHYYTWAVQAYTGY\nHLA-A24:53,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A24:54,YSAMYEEKVAHTDENIAYLMFHYYTWAVQPYTGY\nHLA-A24:55,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYEGY\nHLA-A24:56,YSAMYEEKVAHTDENIAYLMFHYYTWAEQAYTGY\nHLA-A24:57
,YSAMYEEKVAHTDENIAYIMYHYYTWAVQAYTGY\nHLA-A24:58,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:59,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYTGY\nHLA-A24:61,YSAMYEEKVAHTDEKIAYLMFHYYTWAVQAYTGY\nHLA-A24:62,YSAMYEEKVAHTDENIAYLMFQDYTWAVQAYTGY\nHLA-A24:63,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:64,YSAMYEEKVAHTDENIAYLWIHYYTWAVQAYTGY\nHLA-A24:66,YSAMYEEKVAHTDENIAYLMFEHYTWAVQAYTGY\nHLA-A24:67,YSAMYRNNVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:68,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:69,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:70,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:71,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:72,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:73,YSAMYEEKVAHTDENIAYLMFDYYTWAVQAYTGY\nHLA-A24:74,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:75,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:76,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:77,YSAMYQEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:78,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:79,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:80,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:81,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:82,YTAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:85,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:87,YSAMYEEKVAHTDENIAYLMFHYYTWAVRAYTGY\nHLA-A24:88,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:89,YSAMYGEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A24:91,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:92,YSAMYEEKVAHTDENIAYIIYHYYTWAVQAYTGY\nHLA-A24:93,YSAMYEEKVAHTDENIAYVMFHYYTWAVQAYTGY\nHLA-A24:94,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A24:95,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:96,YSAMYEEKVAHTDENIAYLMFNFYTWAVQAYTGY\nHLA-A24:97,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:98,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A24:99,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A2501,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A2502,YYAMYRNNVAQTDESIAYIRYQDYTWAEWAYRWY\nHLA-A2503,YFAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A2504,YYAMYRNNVAHTDESIAYIRYQDYTWAEQAYRWY\nHLA-A2505,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A2506,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYTWY\nHLA-A25:01,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:02,YYAMYRNNVAQTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:03,YFAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:04,YYAMYRNNVAHTDESIAYIRYQDYTWAEQAYRWY\nHLA-A25:05,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:06,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYTWY\nHLA-A25:07,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:08,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:09,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:10,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:11,YYAMYRNNVAHTHESIAYIRYQDYTWAEWAYRWY\nHLA-A25:13,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:14,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:15,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:16,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:17,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:18,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYGWY\nHLA-A25:19,YYAMYRNNVAHTDESIAYIRYQDYTWAELAYRWY\nHLA-A25:20,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:21,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:22,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:23,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:24,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:25,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:26,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:27,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:28,YYAMYRNNVAHTDESIAYIRYQDYTWAVWAYRWY\nHLA-A25:29,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:30,YFAMYRNNVAHTDESIAYIRYQDYTWAVLAYRWY\nHLA-A25:31,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:32,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:33,YYAMYRNNVAHTNESIAYIRYQDYTWAEWAYRWY\nHLA-A25:34,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\
nHLA-A25:35,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:36,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYTGY\nHLA-A25:37,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:38,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:39,YYAMYQNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:40,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:41,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:43,YYAMYRNNVAHTDESIAYIIYRDYTWAEWAYRWY\nHLA-A25:44,YYAMYRNNVAHTDESIAYIRYQDYTWAVQAYRWY\nHLA-A25:45,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:46,YYAMYGEKVAHTDESMAYIRYQDYTWAEWAYRWY\nHLA-A25:47,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:48,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:50,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYLWY\nHLA-A25:51,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:52,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYQWY\nHLA-A25:53,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:54,YCAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A25:55,YYAMYRNNVAHADESIAYIRYQDYTWAEWAYRWY\nHLA-A25:56,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRSY\nHLA-A25:57,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A2601,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2602,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A2603,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A2604,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYLWY\nHLA-A2605,YYAMYRNNVAHTDENTLYIRYQDYTWAEWAYRWY\nHLA-A2606,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A2607,YYAMYGEKVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2608,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A2609,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYTWY\nHLA-A2610,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2611,YYAMYRNNVAHTDANTLYIRYQDYTWPMSEPASD\nHLA-A2612,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A2613,YYAMYRNNVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2614,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2615,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2616,YSAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2617,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2618,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A2619,YYAMYQENVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2620,YFAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2621,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A2622,YYAMYRNNVAHTDANTLYVRYQDYTWAEWAYRWY\nHLA-A2623,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2624,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2626,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2627,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2628,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2629,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A2630,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A2631,YYAMYPNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2632,YYAMYRNNVAHTDANTLYMVYQDYTWAEWAYRWY\nHLA-A2633,YYAMYRNNVAQIHANTLYIRYQDYTWAEWAYRWY\nHLA-A2634,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYWWY\nHLA-A2635,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:01,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:02,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A26:03,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A26:04,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYLWY\nHLA-A26:05,YYAMYRNNVAHTDENTLYIRYQDYTWAEWAYRWY\nHLA-A26:06,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A26:07,YYAMYGEKVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:08,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A26:09,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYTWY\nHLA-A26:10,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:100,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:101,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:102,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:103,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYGWY\nHLA-A26:104,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:105,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:106,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:108,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:109,YYAMYRNNVAHTDANTLCIRYQDYTWAEWAYRWY\nHLA-A26:110,YYAMYRNNVAHTDPNTLYIRYQDYTWAEWAYRWY\nHLA-A26:111,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A
26:112,YYAMYRNNVAHTDANTLYIRYQDYTWAVLAYRWY\nHLA-A26:113,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:114,YYAMYWNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:115,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:116,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:117,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:118,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A26:119,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:12,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A26:120,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:121,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:122,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:123,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:124,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A26:125,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:126,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:128,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:129,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:13,YYAMYRNNVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:130,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:131,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:132,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A26:133,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:134,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:135,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:136,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:137,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:138,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A26:139,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:14,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:140,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:141,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:142,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:143,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:144,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:146,YYAMYRNNVAHTHANTLYIRYQDYTWAEQAYRWY\nHLA-A26:147,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRSY\nHLA-A26:148,YYAMYRNHVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:149,YYAMYRNNVAHTDANTLYIRYQDYTWAEWVYRWY\nHLA-A26:15,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:150,YYAIYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A26:151,YYAMYRNIVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:152,YYAMYRNNVAHTDANTLYIRYQDYTWAAWAYRWY\nHLA-A26:153,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:154,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:155,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:156,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:157,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:158,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:159,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:16,YSAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:160,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:162,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:163,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:164,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:165,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:167,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:168,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:169,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:17,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:170,YYAMYQNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:171,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:172,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A26:173,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:174,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:175,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:176,YYAMYRNNVAHTDANTLYIMYQDYTWAEWAYRWY\nHLA-A26:177,YTAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A26:178,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:18,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A26:19,YYAMYQENVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:20,YFAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:21,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A26:22,YYAMYRNNVAHTDANTLYVRYQDYTWAEWAYRWY\nHLA-A26:23,YYAMYRNN
VAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:24,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:26,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:27,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:28,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:29,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A26:30,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A26:31,YYAMYPNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:32,YYAMYRNNVAHTDANTLYMVYQDYTWAEWAYRWY\nHLA-A26:33,YYAMYRNNVAQIHANTLYIRYQDYTWAEWAYRWY\nHLA-A26:34,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYWWY\nHLA-A26:35,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:36,YYAMYRNNVAHTHANTLYIRYQDYTWAEWAYRWY\nHLA-A26:37,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:38,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:39,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:40,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:41,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:42,YYAIYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:43,YYAMYRNNVAHTDANTLYIRYQDYTWAELAYRWY\nHLA-A26:45,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:46,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:47,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:48,YYAMYRNKVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A26:49,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A26:50,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:51,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:52,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:53,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:54,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:55,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:56,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:57,YYAMYRNNVAHTDANILYIRYQDYTWAEWAYRWY\nHLA-A26:58,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:59,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:61,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:62,YYAMYRNNMAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:63,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:64,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:65,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:66,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:67,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A26:68,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:69,YYAMYRNKVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:70,YYAMYRNNVAHTDANTLYIMYQDYTWAEWAYRWY\nHLA-A26:72,YYAMYQENMAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:73,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A26:74,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:75,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:76,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:77,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:78,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYLWY\nHLA-A26:79,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:80,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:81,YYALYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A26:82,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:83,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:84,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:85,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:86,YYAMYRNNVAHTDANTLYIRYQDYTWAVQAYRWY\nHLA-A26:87,YYAMYRNNVAHTDANTLYLRYQDYTWAEWAYRWY\nHLA-A26:88,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:89,YHAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:90,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:91,YYAMYRNNVAHTDANTLYIRYQDYTWAVQAYTWY\nHLA-A26:92,YYAMYGEKVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A26:93,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:94,CYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:95,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:96,YYTMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:97,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:98,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A26:99,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A2901,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2902,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2903,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTGY\nHLA-A
2904,YTAMYLQHVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2905,YTAMYLQNVAQTDANTLYIMYRDYTWAEQAYTWY\nHLA-A2906,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2907,YTAMYLQNVAQTDANTLYLMFRDYTWAVLAYTWY\nHLA-A2909,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2910,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2911,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2912,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2913,YTAMYLQNVAQTDESIAYIMYRDYTWAVLAYTWY\nHLA-A2914,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2915,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A2916,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:01,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:02,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:03,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTGY\nHLA-A29:04,YTAMYLQHVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:05,YTAMYLQNVAQTDANTLYIMYRDYTWAEQAYTWY\nHLA-A29:06,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:07,YTAMYLQNVAQTDANTLYLMFRDYTWAVLAYTWY\nHLA-A29:09,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:10,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:100,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:101,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:102,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:103,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:104,YTAMYLQNVAQTDANTLYIMYRDYTWAAQAYTWY\nHLA-A29:105,YTAMYRNNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:106,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:107,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:108,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:109,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:11,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:110,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:111,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:113,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:114,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:115,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:116,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:117,CTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:118,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:119,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:12,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:120,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:121,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:122,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:123,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:124,YTAMYLQNVAQTDANTLYIMYRDYTWTVLAYTWY\nHLA-A29:125,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:127,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:128,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:13,YTAMYLQNVAQTDESIAYIMYRDYTWAVLAYTWY\nHLA-A29:14,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:15,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:16,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:17,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:18,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:19,YTAMYLQNVAHTHVDTLYIMYRDYTWAVLAYTWY\nHLA-A29:20,YTAMYLQNVAHTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:21,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:22,YTAMYLQNVAQTDANTLYVRYRDYTWAVLAYTWY\nHLA-A29:23,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:24,YTAMYLQNVAQTDANTLYIMYRDYTWAVLGYTWY\nHLA-A29:25,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:26,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:27,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:28,YTAMYLQNVAQTDANTLYIMYRDYTWAVWAYTWY\nHLA-A29:29,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:30,YTAMYLQNVARTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:31,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:32,YTAMYLQNVAQTDANTLYIMYQDYTWAVLAYTWY\nHLA-A29:33,YTAMYLQNVAQTDANTLYIMYRDYTWAVQAYTGY\nHLA-A29:34,YTAMYLQNVAHTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:35,YTAMYLQNVAQTDANTLYIMYRDYTWAMLAYTWY\nHLA-A29:36,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:37,YSAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:38,YTAMYLQNV
AQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:39,YTAMYLQNVAQIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A29:40,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:41,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:42,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:43,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:44,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:45,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:46,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:47,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:48,YTAMYGEKVAHTHVDTLYIMYRDYTWAVLAYTWY\nHLA-A29:49,YTAMYLQNVAQTDANTLYIMFRDYTWAVLAYTWY\nHLA-A29:50,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:51,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:52,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:53,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:54,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:55,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:56,YSAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:57,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:58,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:59,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:60,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:61,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:62,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:63,YTAMNLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:64,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:65,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:66,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:67,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYRWY\nHLA-A29:68,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:69,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:70,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYAWY\nHLA-A29:71,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:72,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:73,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:74,YTAMYLQNVAHTHANTLYIMYRDYTWAVLAYTWY\nHLA-A29:75,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:76,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:77,YTAMYLQNVAQTDANTLYIMYRDYTWAVQAYTWY\nHLA-A29:79,YTAMYLQNVAQTDANTLYIMYRDYTWAVRAYTWY\nHLA-A29:80,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:81,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:82,YTAMYLQNVAQTDANTLYIMYGDYTWAVLAYTWY\nHLA-A29:83,YTAMYLQNVAQTDANTLYIMYRDYTWAELAYTWY\nHLA-A29:84,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:85,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:86,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:87,YTAMYLQNVAQTDANTLYIMYRDYTWAVQAYTWY\nHLA-A29:88,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:89,YTAMYLQNVAQTDANTLYIRYRDYTWAVLAYTWY\nHLA-A29:90,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:91,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:92,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:93,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:94,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:95,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:96,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:97,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A29:98,YTAMYLQNVAHTDENTLYIMYRDYTWAVLAYTWY\nHLA-A29:99,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A3001,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3002,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A3003,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A3004,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A3006,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A3007,YSAMYEEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A3008,YYAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3009,YSAMYQENVAHTDENTLYIIYEHYTWAVLAYTWY\nHLA-A3010,YSAMYQENVAHTDENTLYIIHEHYTWARLAYTWY\nHLA-A3011,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3012,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A3013,YSAMYQENVAHTHVDTLYIIYEHYTWARLAYTWY\nHLA-A3014,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3015,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3016,YSAMYQENVAQTHVDTLYIIYEHYTWAWLAYTWY\nHLA-A3017,YSAMYQEN
VAQTDVDTLYIIYEHYTWAVWAYTWY\nHLA-A3018,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3019,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3020,YSAMYQENVAQTEVDTLYIIYEHYTWAWLAYTWY\nHLA-A3021,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A3022,YSAMYGEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:01,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:02,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:03,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:04,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A30:06,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A30:07,YSAMYEEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:08,YYAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:09,YSAMYQENVAHTDENTLYIIYEHYTWAVLAYTWY\nHLA-A30:10,YSAMYQENVAHTDENTLYIIHEHYTWARLAYTWY\nHLA-A30:100,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:102,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:103,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A30:104,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:105,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A30:106,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:107,YSAMYQENVAHTDENTLYIIYEHYIWARLAYTWY\nHLA-A30:108,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:109,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:11,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:110,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:111,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:112,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:113,YSAMYQENVAQTDVDTLYLIYEHYTWAWLAYTWY\nHLA-A30:114,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:115,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:116,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:117,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYRWY\nHLA-A30:118,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:119,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:12,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:120,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:122,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTSY\nHLA-A30:124,YSSMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:125,YSAMYQENVAQTDVDTLYIIYEHYTWARLAYTWY\nHLA-A30:126,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:127,YSAMYQENVAHTHENTLYIIYEHYTWARLAYTWY\nHLA-A30:128,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:129,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:13,YSAMYQENVAHTHVDTLYIIYEHYTWARLAYTWY\nHLA-A30:131,YSAMYQENVAQTDVDTLYIVYEHYTWAWLAYTWY\nHLA-A30:133,NSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:134,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:135,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:136,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:137,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:138,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:139,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A30:140,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:141,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:142,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:143,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:144,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:14L,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:15,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:16,YSAMYQENVAQTHVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:17,YSAMYQENVAQTDVDTLYIIYEHYTWAVWAYTWY\nHLA-A30:18,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:19,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:20,YSAMYQENVAQTEVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:22,YSAMYGEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:23,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:24,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:25,YSAMYQENVAQTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:26,YSAMYQENVAQTDVDTLYIIYRDYTWAWLAYTWY\nHLA-A30:28,YSAMYQENVAHTDENTLYIVYEHYTWARLAYTWY\nHLA-A30:29,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTSY\nHLA-A30:30,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:31,YSAMYQENVARTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:32,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:33,YS
AMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:34,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:35,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:36,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:37,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:38,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:39,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:40,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:41,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:42,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:43,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYMWY\nHLA-A30:44,YSAMYQENVAHTHVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:45,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:46,YSAMYQENVAHTHVDTLYIIYEHYTWAVWAYTWY\nHLA-A30:47,YSAMYQENVAHTDENTLYVIYEHYTWARLAYTWY\nHLA-A30:48,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:49,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:50,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:51,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:52,YSAMYQENVAQTDVDTLYLRYEHYTWAWLAYTWY\nHLA-A30:53,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:54,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:55,YFAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:56,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:57,YSAMYQENVAHTDANTLYIIYEHYTWARLAYTWY\nHLA-A30:58,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:60,YSAMYQENAAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:61,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:62,YSAMYQENVAQTDVDNLYIIYEHYTWAWLAYTWY\nHLA-A30:63,YSAMYQENVAQTDVDTLYIIYEHYTWAWLTYTWY\nHLA-A30:64,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:65,YSAMYQENVAQTDLDTLYIIYEHYTWAWLAYTWY\nHLA-A30:66,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:67,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:68,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:69,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:71,YSAMYQENVAQADVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:72,YSAMYQENVAQTDVDTLYTIYEHYTWAWLAYTWY\nHLA-A30:74,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:75,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTGY\nHLA-A30:77,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A30:79,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:80,YSAMYQENVAHTDENTLYLIYEHYTWARLAYTWY\nHLA-A30:81,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:82,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:83,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:84,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:85,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A30:86,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:87,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:88,YSAMYQENVAQTDENTLYIIHEHYTWARLAYTWY\nHLA-A30:89,YSAMYQENVAQTDVDTLYIIYEHYTWAELAYTWY\nHLA-A30:90,YSAMYQENVAHTDANTLYIIYEHYTWAVWAYTWY\nHLA-A30:91,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:92,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:93,YSAMYQENVAQTDVDTLYIIYQHYTWAWLAYTWY\nHLA-A30:94,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:95,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:96,YSSMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:97,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:98,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A30:99,YSAMYQENVAHTDENTLYIIYEHYTWAVQAYTWY\nHLA-A3101,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3102,YTAMYQEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3103,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A3104,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A3105,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A3106,YTAMYQENVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A3107,YTAMYQEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A3108,YTAMYEEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A3109,YTAMYQENVGHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3110,YTAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A3111,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3112,YTAMYQENVAHIDVDTLYIKYQDYTWAVLAYTWY\nHLA-A3113,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3114,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3115,
YTAMYQENVARIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3116,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3117,YTAMYQENVAHINVDTLYIMYQDYTWAVLAYTWY\nHLA-A3118,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTRY\nHLA-A31:01,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:02,YTAMYQEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:03,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A31:04,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A31:05,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A31:06,YTAMYQENVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A31:07,YTAMYQEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A31:08,YTAMYEEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A31:09,YTAMYQENVGHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:10,YTAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A31:100,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:101,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:102,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:103,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:104,YTAMYRENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:105,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:106,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:107,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:108,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:109,YTAMYGEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:11,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:110,YTAMYQENVAHIDVDTLYIMYQDYTWAELAYTWY\nHLA-A31:111,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:112,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:113,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:114,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:115,YTAMYQENVAQTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:116,YTAMYQENVAHIDVDTLYIMYQDCTWAVLAYTWY\nHLA-A31:117,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:118,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:119,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:12,YTAMYQENVAHIDVDTLYIKYQDYTWAVLAYTWY\nHLA-A31:120,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:121,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:122,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:123,YTAMYQENVAHIDVDTLYIRYQDYTWAVLAYTWY\nHLA-A31:124,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:125,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:127,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:128,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:129,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTCY\nHLA-A31:13,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:130,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:132,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:133,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:134,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:135,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:136,YTAMYQENVAHIDVDTLYIMYQDYTWAVQAYTWY\nHLA-A31:137,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:138,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:139,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:140,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:142,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:143,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:144,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:145,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:146,YTAMYQENVAHIDVDTLYLMYQDYTWAVLAYTWY\nHLA-A31:147,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:148,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:15,YTAMYQENVARIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:16,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:17,YTAMYQENVAHINVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:18,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTRY\nHLA-A31:19,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:20,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:21,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:22,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:23,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:24,YTAMYQENVAHIDVDTLYIMYQDYTWAAQAYRWY\nHLA-A31:25,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYRWY\nHLA-A31:26,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY
\nHLA-A31:27,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:28,YTAMYQENVTHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:29,YTAMYQENVAHIDVDTLYLMFQDYTWAVLAYTWY\nHLA-A31:30,YTAMYQENVAHIDVDTLYIMYQDYTWAVWAYTWY\nHLA-A31:31,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:32,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:33,YTAMYQENVAHIDGDTLYIMYQDYTWAVLAYTWY\nHLA-A31:34,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:35,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:36,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:37,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:38,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:39,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:40,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:41,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:42,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:43,YTAMYQENVAHIDVDTLYIMYQDYTWAALAYTWY\nHLA-A31:44,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:45,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:46,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:47,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:48,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:49,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:50,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:51,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:52,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:53,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:54,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:55,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:56,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:57,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:58,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:59,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:61,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTSY\nHLA-A31:62,YTAMYQENVAHIDVDTLYIMYQDYTWVALAYTWY\nHLA-A31:63,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:64,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:65,YTAMYQENVAHIDVDTLHIMYQDYTWAVLAYTWY\nHLA-A31:66,YTAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:67,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:68,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:69,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:70,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:71,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:72,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:73,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:74,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:75,YTAMYKENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:76,YTAMYQENVAHIDVDTLYIMYQDYTWTVLAYTWY\nHLA-A31:77,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:78,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:79,YTAMYQENVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A31:80,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:81,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:82,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:83,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:84,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:85,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:86,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:87,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:88,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A31:89,YTAMYQENVAQTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:90,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:91,YTAMYQEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:92,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:93,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:94,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:95,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:96,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:97,YTAMYQENVAHIDVDTLYIMYQDYTWAERAYTWY\nHLA-A31:98,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A31:99,YTAMYQENVAHIDVDTLYVRYQDYTWAVLAYTWY\nHLA-A3201,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A3202,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A3203,YFAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A3204,YFAMYQENVAHTDESIAYIIY
RDYTWAELAYTWY\nHLA-A3205,YFAMYQEKVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A3206,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A3207,YSAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A3208,YFAMYQENVAHTHESIAYIMYQDYTWAVLAYTWY\nHLA-A3209,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTRY\nHLA-A3210,YFAMYQENVAHTDESIAYIMYQDYTWAEWAYTWY\nHLA-A3211,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYEWY\nHLA-A3212,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A3213,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTGY\nHLA-A3214,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A3215,YFAMYRNNVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:01,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:02,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A32:03,YFAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A32:04,YFAMYQENVAHTDESIAYIIYRDYTWAELAYTWY\nHLA-A32:05,YFAMYQEKVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:06,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:07,YSAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:08,YFAMYQENVAHTHESIAYIMYQDYTWAVLAYTWY\nHLA-A32:09,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTRY\nHLA-A32:10,YFAMYQENVAHTDESIAYIMYQDYTWAEWAYTWY\nHLA-A32:100,YFAMYQENVVHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:102,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:103,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:104,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:105,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:106,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:107,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:108,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:109,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:110,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:111,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:113,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:114,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:115,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:116,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:118,YFAMYQENVAHTDESIAYIMYQDYTWAVWAYTWY\nHLA-A32:119,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:12,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:120,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:121,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:13,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTGY\nHLA-A32:14,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:15,YFAMYRNNVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:16,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:17,YFAMYQENVAQTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:18,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:20,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:21,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:22,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A32:23,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:24,YFAMYQENMAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:25,YFAMYHENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:26,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYRWY\nHLA-A32:28,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:29,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:30,YFAMYQENVAHTDESIAYIMYRDYTWAVLAYTWY\nHLA-A32:31,YFAMYQEKVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:32,YFAMYQENVAHTDESIAYIMYRDYTWAVLAYTWY\nHLA-A32:33,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:34,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:35,YFALYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:36,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:37,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:38,YFAMYQENVAHTDERIAYIMYQDYTWAVLAYTWY\nHLA-A32:39,YFAMYRENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:40,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:41,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:42,YTAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:43,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:44,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:46,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:47,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:49,YFAMYQENVAYTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:50,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32
:51,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:52,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:53,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:54,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:55,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:57,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:58,YFARYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:59,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:60,YFAMYQENVAHTDESIACIMYQDYTWAVLAYTWY\nHLA-A32:61,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:62,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:63,YFAMYQENVAHTDESIAYIMYQDYTWAELAYTWY\nHLA-A32:64,YYAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:65,YFAMYQENVAHTDESIAYIIYQDYTWAVLAYTWY\nHLA-A32:66,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:67,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:68,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:69,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:70,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:71,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:72,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:73,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:74,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:75,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:76,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:77,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:78,YFAMYQENVAHTDEIIAYIMYQDYTWAVLAYTWY\nHLA-A32:79,YFAMYEEKVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:80,YFAMYQENVAHTDESALYIMYQDYTWAVLAYTWY\nHLA-A32:81,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTSY\nHLA-A32:82,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:83,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:84,YFAMYQENVAHIDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:85,YFAIYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:86,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:87,YFAMYQENVAHTDESIAYIMYQDYTWALLAYTWY\nHLA-A32:88,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:89,YFAMYQENVAHTDESIAYLNYQDYTWAVLAYTWY\nHLA-A32:90,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:91,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:93,YFAMYGENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:94,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:95,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:96,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:97,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:98,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A32:99,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A3301,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A3303,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3304,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A3305,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A3306,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3307,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A3308,YTAMYGEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3309,YTAMYGENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3310,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A3311,YTAMYRNNVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3312,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3313,YTAMYRNNVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A33:01,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:03,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:04,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:05,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:06,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:07,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:08,YTAMYGEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:09,YTAMYGENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:10,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A33:100,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:101,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:102,YTAMYRNNVAHIDVDTLYIMYQDYTWAVQAYTWY\nHLA-A33:103,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:104,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:105,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:106,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWN\nHLA-A33:107,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLA
YTWY\nHLA-A33:108,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:109,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:11,YTAMYRNNVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:110,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:111,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:112,YTAMYRNNVAHIDVDTLYIMYQDYTWARLAYTWY\nHLA-A33:113,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:114,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:115,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:116,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:117,YTAMYRNNVAHIGVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:118,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:119,YSAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:12,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:120,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYMWY\nHLA-A33:121,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:122,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:124,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:125,YTAMYRNNVAQTDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:126,YTAMYRNNVAHIDVDTLYIMHQDYTWAVLAYTWY\nHLA-A33:127,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:128,YTAMYRNNVAYIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:13,YTAMYRNNVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A33:130,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:131,YTAMYRNNVAQTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:132,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTRY\nHLA-A33:133,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:134,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:135,YTAMYRSNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:136,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:137,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:138,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:139,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:14,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:141,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:142,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:144,YTAMYRNNVAHIDVDTLYIIYQDYTWAVLAYTWY\nHLA-A33:145,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:146,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:147,YTAMYRNNVAHTDVNTLYIMYQDYTWAVLAYTWY\nHLA-A33:148,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:149,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:15,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:150,YTAMYRNNVAHIDVDTLYIMYQDYTWAVQAYTWY\nHLA-A33:151,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:152,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:153,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:155,YTAMYRNNVAHIDVDTLYIMYQDYTWAVWAYTWY\nHLA-A33:158,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:159,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:16,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:160,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:161,YTAMYRNNVAHIDMDTLYIMYQDYTWAVLAYTWY\nHLA-A33:162,YTAMYRNNVAHIDVDTLYIKYQDYTWAVLAYTWY\nHLA-A33:163,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:164,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:165,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:166,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:167,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLPYTWH\nHLA-A33:168,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:169,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:17,YTAMYRNNVAHIDADTLYIMYQDYTWAVLAYTWY\nHLA-A33:170,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:18,YTAMYRNNVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A33:19,YTAMYRNNVAHIDVDTLYLMFHYYTWAVQAYTGY\nHLA-A33:20,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:21,YTAMYEENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:22,YTAMYRNNVAHIDVDTLYVRYQDYTWAVLAYTWY\nHLA-A33:23,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:24,YTAMYRNNVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:25,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:26,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:27,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:28,YTAMYRNNVAHIDVDTLYIMYQDYTWAELAYTWY\nHLA-A33:29,YTAM
YRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:30,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:31,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:32,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:33,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:34,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:35,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:36,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:37,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:39,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:40,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:41,YTAMYRNNVAHIDVDTPYIMYQDYTWAVLAYTWY\nHLA-A33:42,YTAMYRNNVSHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:43,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:44,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:45,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:46,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:47,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:48,YTAMYRNNVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A33:49,YTAMYRNNVAHIDVDTLYIIYQDYTWAVLAYTWH\nHLA-A33:50,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:51,YYAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:52,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:53,YTAMYEEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:54,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:55,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:56,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:57,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:58,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:59,YTAMYRNNVAHIDVDTLYIMYQDYTWAAQAYTWY\nHLA-A33:60,YTSMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:61,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYRWY\nHLA-A33:62,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:63,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:64,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:65,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:66,YTAMYQNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:67,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:68,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:69,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:70,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:71,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:72,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:75,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:76,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:77,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:78,YTDMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:79,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:81,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:82,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:83,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:84,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:85,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:86,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:87,YTAMYRNNVAHFDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:88,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:89,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:90,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:91,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:92,YAAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A33:93,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:94,YTAMYRNNVAHIDVDTLYIMYQDYTWAVWAYTWY\nHLA-A33:95,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:97,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A33:98,YTAMYRNNVAHIDVDTLYIMYQDYTRAVLAYTWY\nHLA-A33:99,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A3401,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A3402,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A3403,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A3404,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A3405,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A3406,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYTWY\nHLA-A3407,YYAMYRNNVSQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A3408,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:01,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:02,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A3
4:03,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:04,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:05,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYTWY\nHLA-A34:07,YYAMYRNNVSQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:08,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:09,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:11,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYAWY\nHLA-A34:12,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:13,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:14,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:15,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:16,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:17,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:18,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:19,YYAMYQENVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A34:20,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A34:21,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A3601,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A3602,YFAMYQENMAHTDANTLYIIYRDYTWVARAYTWY\nHLA-A3603,YFAMYQENMAHTDANTLYLMYRDYTWVARVYTWY\nHLA-A3604,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRWY\nHLA-A36:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A36:02,YFAMYQENMAHTDANTLYIIYRDYTWVARAYTWY\nHLA-A36:03,YFAMYQENMAHTDANTLYLMYRDYTWVARVYTWY\nHLA-A36:04,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRWY\nHLA-A36:05,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A36:06,YFAMYQENMAHTDANTLYIIYRDCTWVARVYTWY\nHLA-A36:07,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A36:08,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A4301,YYAMYLQNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A43:01,YYAMYLQNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A6601,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A6602,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A6603,YYAMYRNNVAHTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A6604,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWH\nHLA-A6605,YYAMYRNNVAHTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A6606,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYRWY\nHLA-A66:01,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:02,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A66:03,YYAMYRNNVAHTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A66:04,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWH\nHLA-A66:05,YYAMYRNNVAHTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYRWY\nHLA-A66:07,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:08,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:09,YYAMYRNNVAQTDVDTLYVRYQDYTWAEWAYRWY\nHLA-A66:10,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRGY\nHLA-A66:11,YYAMYRNNVAQTDADTLYIRYQDYTWAEWAYRWY\nHLA-A66:12,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:13,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:14,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:15,YYAMYRNNVAHIDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:16,YYAMYRNNVAQTDVDTLYIRYQDYTWVEWAYEWY\nHLA-A66:17,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:18,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:19,YYAMYRNNVAQTDVDTLYIRYQDYTWAERAYRWY\nHLA-A66:20,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:21,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A66:22,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:23,YYAMYQENVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:24,YYAMYRNNVAQTDVDTLHIRYQDYTWAEWAYRWY\nHLA-A66:25,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A66:29,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:30,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:31,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A66:32,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A6801,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6802,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A6803,YYAMYRNNVAHTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6804,YYAMYRNNVAHIDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6805,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A6806,YYAMYRNNVAQTDVDTLYIMYEHYTWAVWAYTWY\nHLA-A6807,YYAMYRNNVAQTDVDTLYIMYRHYTWAVWAYTWY\nHLA-A6808,YYAMYRNNVAQTDVDTLYIMYRDYTWAVLAYTWY\nHLA-A6809,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTWY\nHLA-A6810
,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6812,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6813,YYAMYRENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6814,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6815,YYAMYRNNVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A6816,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6817,YYAMYRNNVAQTDVDTLYIMYRVYTWAVWAYTWY\nHLA-A6819,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6820,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A6821,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6822,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6823,YYAMYRNNVAQTDVDTLYIRYRDYTWAVWAYTWY\nHLA-A6824,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6825,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6826,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTGY\nHLA-A6827,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A6828,YYAMYRNNVAQTDVDTLYIRYHYYTWAVRAYTWY\nHLA-A6829,YTAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6830,YYAMYGENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6831,YYAMYRNNVAHTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A6832,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6833,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6834,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYMWY\nHLA-A6835,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6836,YYAMYRNNVAQTDENIAYIMYRDYTWAVWAYTWY\nHLA-A6837,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6838,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6839,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6840,YYAMYRNNVGQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:01,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:02,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:03,YYAMYRNNVAHTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:04,YYAMYRNNVAHIDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:05,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:06,YYAMYRNNVAQTDVDTLYIMYEHYTWAVWAYTWY\nHLA-A68:07,YYAMYRNNVAQTDVDTLYIMYRHYTWAVWAYTWY\nHLA-A68:08,YYAMYRNNVAQTDVDTLYIMYRDYTWAVLAYTWY\nHLA-A68:09,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTWY\nHLA-A68:10,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:100,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:101,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:102,YYTMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:103,YYAMYRNNVAQTDVDTLYIIYRDYTWAVWAYTWY\nHLA-A68:104,YYAMYRNNVAQTDVDTLYIMYHYYTWAVWAYTWY\nHLA-A68:105,YYAMYRNNVAQTDVDTLYVRYRDYTWAVWAYTWY\nHLA-A68:106,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:107,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:108,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWTYTWY\nHLA-A68:109,YYAMYRNNVAHTDENTLYIMYRDYTWAVWAYTWY\nHLA-A68:110,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:111,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:112,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:113,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWVYTWY\nHLA-A68:114,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:115,YYAMYRNNVAQTDVDTLYIMYRDYTWAVRVYTGY\nHLA-A68:116,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:117,YSAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:118,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:119,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:12,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:121,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:122,YYAIYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:123,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:124,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:125,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:126,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:127,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:128,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:129,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTWY\nHLA-A68:13,YYAMYRENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:130,YYAMYLQNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:131,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTGY\nHLA-A68:132,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:133,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:134,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYRWY\nHLA-A68:135,YYAMYRNNVAQTDVDTLYIMYR
DYTWAVWAYTWY\nHLA-A68:136,YYAMYRNNVAQTDENTLYIMYRDYTWAVWAYTWY\nHLA-A68:137,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:138,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:139,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:14,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:140,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:141,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:143,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:144,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:145,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:146,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:147,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:149,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:15,YYAMYRNNVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:150,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:151,YYAMYRNNVAQTDVDTLYITYRDYTWAVWAYTWY\nHLA-A68:152,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:153,YYAMYRNNVAQTDVATLYIRYHYYTWAVWAYTWY\nHLA-A68:154,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:155,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:156,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:157,YYAMYRNNVAQTDVDTLYIMYRDYTWAVLAYEWY\nHLA-A68:158,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:16,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:160,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:161,YYAMYRNNVAQTDVDTLYIMYPDYTWAVWAYTWY\nHLA-A68:162,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:163,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:164,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:165,YYAMYRNNVAQTDVDTLHIMYRDYTWAVWAYTWY\nHLA-A68:166,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:167,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:168,YYAMYRNNVAQTDVDTLYIMYRDYTWAVLAYTWY\nHLA-A68:169,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:17,YYAMYRNNVAQTDVDTLYIMYRVYTWAVWAYTWY\nHLA-A68:170,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:172,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:173,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:174,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:175,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:176,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:177,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:178,YYAMYQENVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:179,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:180,YYAMYRNNVAHTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:183,YYAMYRNNVAQTDVDTLYIMYRDYTWAEWAYTWY\nHLA-A68:184,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:185,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:186,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:187,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:188,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:189,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:19,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:190,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:191,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:192,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:193,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:194,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:195,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:196,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:197,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:198,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:20,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:200,YYAMYRNNVAQTDVDTLYIMYRDYAWAVWAYTWY\nHLA-A68:201,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:202,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:204,YYAMYRNNVAQTDVDTLYIMYRDYTWAEQAYTWY\nHLA-A68:21,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:22,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:23,YYAMYRNNVAQTDVDTLYIRYRDYTWAVWAYTWY\nHLA-A68:24,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:25,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:26,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTGY\nHLA-A68:27,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHL
A-A68:28,YYAMYRNNVAQTDVDTLYIRYHYYTWAVRAYTWY\nHLA-A68:29,YTAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:30,YYAMYGENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:31,YYAMYRNNVAHTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:32,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:33,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:34,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYMWY\nHLA-A68:35,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:36,YYAMYRNNVAQTDENIAYIMYRDYTWAVWAYTWY\nHLA-A68:37,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:38,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:39,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:40,YYAMYRNNVGQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:41,YYAMYRNNVAQTDVDTLYIMYRDYTWVVWAYTWY\nHLA-A68:42,YYAMYRNNVAQTDVDTLYIMYRDYTWAEWAYTWY\nHLA-A68:43,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:44,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:45,YSAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:46,YYAMYRNNVAQTDVNTLYIMYRDYTWAVWAYTWY\nHLA-A68:47,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:48,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:50,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:51,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:52,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:53,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:54,YYAMYRNNVAQTDVDTLYIRYHYYTWAEWAYTWY\nHLA-A68:55,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:56,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:57,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:58,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:60,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:61,YYAMYRNNVAQTDVDTLYIRYHYYTWAEWAYRWY\nHLA-A68:62,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:63,YYAMYRNNVAQTDVDTLYIMYRDYTWAELAYTWY\nHLA-A68:64,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:65,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTGY\nHLA-A68:66,YYAMYQENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:67,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:68,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:69,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:70,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:71,YFAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:72,YYAMYRNNVTQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:73,YYAMYRNNVAQTDVDTLYIMYRDYTWAVRAYTWY\nHLA-A68:74,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:75,YYAMYGNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:76,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:77,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:78,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:79,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:80,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:81,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:82,YYAMYRNNVAQTDVDTLYIRCHYYTWAVWAYTWY\nHLA-A68:83,YYAMYRNNVAQTDVDTLYVMYRDYTWAVWAYTWY\nHLA-A68:84,YYAMYRNNVAQTDANTLYIMYRDYTWAVWAYTWY\nHLA-A68:85,YYAMYRNNVVQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:86,YYAMYRNNVAQTDVDILYIRYHYYTWAVWAYTWY\nHLA-A68:87,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:88,YYAMYRNNVAQTDVDTLYIMYRYYTWAVWAYTWY\nHLA-A68:89,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:90,YYAMYRNNVAHTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:91,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:92,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:93,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:95,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:96,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:97,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A68:98,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A68:99,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A6901,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A69:01,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A69:02,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A69:03,YYAMYRNNVAQTDVDTLYVRYHYYTWAVQAYTWY\nHLA-A69:04,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A69:05,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A7401,YFAMYQENVAHTDVDTLYIMYQD
YTWAVLAYTWY\nHLA-A7402,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A7403,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A7404,YFAMYGEKVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A7405,YFAMYQENVAHADVDTLYIMYQDYTWAVLAYTWY\nHLA-A7406,YFAMYQENVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A7407,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A7408,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A7409,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A7410,YFAMYQENVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A7411,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:01,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:02,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:03,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:04,YFAMYGEKVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:05,YFAMYQENVAHADVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:06,YFAMYQENVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:07,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:08,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:09,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:10,YFAMYQENVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A74:11,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:13,YFAMYQENVAQTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:15,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:16,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:17,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:18,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:19,YYAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:20,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:21,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:22,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:23,YFAMYQENVAHTDVDTLYIIYQDYTWAVLAYTWY\nHLA-A74:24,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:25,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:26,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:27,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:28,YFAMYQENVAHTDVDTLYIMYQDYTWAELAYTWY\nHLA-A74:29,YFAMYQENVAHTDVDTLYIMYQDYTWAVWAYTWY\nHLA-A74:30,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:31,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:33,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A74:34,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A8001,YFAMYEENVAHTNANTLYIIYRDYTWARLAYEGY\nHLA-A80:01,YFAMYEENVAHTNANTLYIIYRDYTWARLAYEGY\nHLA-A80:02,YFAMYEENVAHTDVDTLYIIYRDYTWARLAYEGY\nHLA-A80:03,YFAMYEENVAHTNANTLYIIYRDYTWARLAYTGY\nHLA-A80:04,YFAMYEENVAHTNENTLYIIYRDYTWARLAYEGY\nHLA-A9201,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYRWY\nHLA-A9202,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A9203,YFAMYQENVAQTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A9204,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A9205,YFAMYGEKVAHTHVDTLYVRYEYYTWAVLAYTWY\nHLA-A9206,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9207,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9208,YYAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A9209,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9210,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A9211,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9212,YFAMYGEKVAHTDENIAYVRCHYYTWAVLAYTWY\nHLA-A9214,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A9215,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A9216,YFAMYGEKVAHTHLDTLYVRYHYYTWAVLAYTWY\nHLA-A9217,YFAMYGEKVAHTHVDTLYVRYQDYTWAEWAYTWY\nHLA-A9218,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9219,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9220,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9221,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9222,YYAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A9223,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A9224,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A9226,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-B0702,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0703,YYSEYRNIYTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B0704,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYEWY\nHLA-B0705,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B0706,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B0707,YYSEYRNIYAQTDESNLYLRYDYYTWAERAYEWY\nHLA-B0708,Y
YSEYRNIFTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B0709,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B0710,YYSEYRNICAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0711,YYSEYRNIYAQTDENNLYLSYDSYTWAERAYEWY\nHLA-B0712,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B0713,YYSGYREKYRQADVSNLYLSYDYYTWAERAYEWY\nHLA-B0714,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B0715,YYSEYRNIYAQADVSNLYLSYDYYTWAERAYEWY\nHLA-B0716,YYSEYRNIYTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B0717,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B0718,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B0719,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYTWY\nHLA-B0720,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLSY\nHLA-B0721,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0722,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0723,YYSEYRNIYAQTDESNLHLSYDYYTWAERAYEWY\nHLA-B0724,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B0725,YYSEYRNIYAQTDESNLYLSYDYYTWAVDAYEWY\nHLA-B0726,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYEWY\nHLA-B0727,YYSEYRNISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B0728,YYSEYRNIYAQTDESNLYLSYDDYTWAERAYEWY\nHLA-B0729,YDSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0730,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0731,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYTWY\nHLA-B0732,YYSEYRNIFTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B0733,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0734,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYTWY\nHLA-B0735,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0736,YYSEYRNIYAQTDENIAYLSYDYYTWAERAYEWY\nHLA-B0737,YYSEYRNIYANTYESNLYLSYDYYTWAERAYEWY\nHLA-B0738,YYSEYRNIFTNTYENIAYLSYDYYTWAERAYEWY\nHLA-B0739,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0740,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B0741,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0742,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0743,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYTWY\nHLA-B0744,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0745,YYSEYRNIYAQTDESNLYLSYDYYTWAERTYEWY\nHLA-B0746,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0747,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0748,YYSEYRNIYAQTDESNLYLSYDYYTWAVLAYEWY\nHLA-B0749,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0750,YYSEYRNISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B0751,YYSEYRNIYAQTDESNLYLSYDYYTWAARAYEWY\nHLA-B0752,YYSEYRNIYAQTDESNLYLSYDYYTWAERVYEWY\nHLA-B0753,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B0754,YYSEYREIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0755,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYEWY\nHLA-B0756,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0757,YYSEYRNIYAQTDENNLYLSYDYYTWAERAYEWY\nHLA-B0758,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:02,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:03,YYSEYRNIYTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:04,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYEWY\nHLA-B07:05,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:06,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:07,YYSEYRNIYAQTDESNLYLRYDYYTWAERAYEWY\nHLA-B07:08,YYSEYRNIFTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:09,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B07:10,YYSEYRNICAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:100,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYLWY\nHLA-B07:101,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:102,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:103,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:104,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:105,YYSEYRNIYAQTVESNLYLSYNYYTWAERAYEWY\nHLA-B07:106,YYSEYRNIYAQTDESNLYLSYDYYTRAERAYEWY\nHLA-B07:107,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:108,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:109,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:11,YYSEYRNIYAQTDENNLYLSYDSYTWAERAYEWY\nHLA-B07:110,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:112,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:113,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:114,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:115,YYSEYRNIYAQTDESNLYLSYNFYTWAERAYEWY\nHLA-B07:116,YYSEYRNIYVQTDESNLY
LSYDYYTWAERAYEWY\nHLA-B07:117,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:118,YYSEYRNIYAQTDESNLYLSYDDYTWAERAYEWY\nHLA-B07:119,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:12,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B07:120,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:121,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYERY\nHLA-B07:122,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:123,YYSEYREIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:124,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:125,YYSEYRNIYTQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:126,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:127,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:128,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:129,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:13,YYSGYREKYRQADVSNLYLSYDYYTWAERAYEWY\nHLA-B07:130,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:131,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYLWY\nHLA-B07:132,YYSEYRNLYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:133,YYSKYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:134,YYSTYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:136,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:137,YYSEYRNIYAQTDESNLYIRYNYYTWAERAYEWY\nHLA-B07:138,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:139,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:14,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B07:140,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYESY\nHLA-B07:141,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:142,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:143,YHSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:144,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWS\nHLA-B07:145,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:146,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYEWY\nHLA-B07:147,YYSEYRNIYAQTDESNLYLSYDYYTLAELAYEWY\nHLA-B07:148,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:149,YYSEYRNIYAQTDENTAYLSYDYYTWAERAYEWY\nHLA-B07:15,YYSEYRNIYAQADVSNLYLSYDYYTWAERAYEWY\nHLA-B07:150,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:151,YYSEYRNIYAQTDESNLYLSYDYYTLAERAYEWY\nHLA-B07:152,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:153,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYESY\nHLA-B07:154,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:155,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:156,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:157,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:158,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:159,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:16,YYSEYRNIYTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B07:160,YYSEYRNIYAQADESNLYLSYDYYTWAERAYEWY\nHLA-B07:162,YYSEYRNIYAQTDESNLYIRYDSYTWAERAYEWY\nHLA-B07:163,YYSEYRNIYAQTDESNLYIRSDYYTWAERAYEWY\nHLA-B07:164,YYSEYRNIYAQTDESNLYLSYDYYTWARLAYEWY\nHLA-B07:165,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:166,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:168,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:169,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:17,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B07:170,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:171,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:172,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:173,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:174,YYSEYRNIYAQTDESNLYLSYDYYSWAERAYEWY\nHLA-B07:175,YYSEYRNIYAQTDESNLYLSYDYCTWAERAYEWY\nHLA-B07:176,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:177,YYTEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:178,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:179,YYSEYRNIYAQTDESDLYLSYDYYTWAERAYEWY\nHLA-B07:18,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B07:180,YYSEYRNICTNTYENIAYLSYDYYTWAERAYEWY\nHLA-B07:183,YYSEYRNISAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:184,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:185,YYSEYRNIYAQTDDSNLYLSYDYYTWAERAYEWY\nHLA-B07:186,YDSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:187,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:188,YYSEYRNIYAQTDESNLYLSYDYYTWAE
RAYEWY\nHLA-B07:189,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:19,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYTWY\nHLA-B07:190,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:191,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:192,YYSEYRNIYAQTDESSLYLSYDYYTWAERAYEWY\nHLA-B07:193,YYSEYRNIYAQTDESNLYLRSDYYTWAERAYEWY\nHLA-B07:194,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:195,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:196,YYSEYRNIYAQTDESNLYLNYDYYTWAERAYEWY\nHLA-B07:197,YYSEYRNIYAQTDESNLYLNYHDYTWAERAYEWY\nHLA-B07:198,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:199,YYSEYRNIYAQTDESNLYISYDYYTWAERAYEWY\nHLA-B07:20,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLSY\nHLA-B07:200,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:202,YYSEYRNIYAQTDESNLYLSYDYYSLAVRAYEWY\nHLA-B07:203,YYSEYRNIYAQIDESNLYLSYDYYTWAERAYEWY\nHLA-B07:204,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:205,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:206,YYAEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:207,YYSEYRNIFTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B07:208,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:209,YYSEYRNIYAQTDESNLYLSYNYYTWAVLAYEWY\nHLA-B07:21,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:210,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:211,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:212,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:213,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:214,YYSEYRNIYAQTDESNLYLRYNFYTWAERAYEWY\nHLA-B07:215,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:216,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:217,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:218,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWH\nHLA-B07:219,YYSEYGENMASTYENIAYLSYDYYTWAERAYEWY\nHLA-B07:22,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:220,YYSEYRNIYAQTDESNLYLSYDYYTWAARAYLWH\nHLA-B07:221,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:222,YYSEYRNIYAQTDESNLYLSYNYYSWAERAYEWY\nHLA-B07:223,YYSEYRNIYAQTDESNLYLSYDYYTWAVLAYLWY\nHLA-B07:224,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:225,YYSEYRNIYAQTDESNLYMVYEHYTWAERAYEWY\nHLA-B07:226,YYSEYRNIYAQTDESNLYLSYDYYTWAALAYEWY\nHLA-B07:227,YYSEYRNIYAQTDESNLYIVYDYYTWAERAYEWY\nHLA-B07:228,YYSEYRNIFTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B07:229,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:23,YYSEYRNIYAQTDESNLHLSYDYYTWAERAYEWY\nHLA-B07:230,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:232,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:233,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:234,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:235,YYSEYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B07:236,YYSEYRNIYAQTDEDTLYLSYDYYTWAERAYEWY\nHLA-B07:237,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B07:238,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:239,YYSEYRNIYAQTDESNLYLWYDYYTWAERAYEWY\nHLA-B07:24,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B07:240,YYSEYRNIYAQTDEDNLYLSYDYYTWAERAYEWY\nHLA-B07:241,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:242,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:243,YYSEYQNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:244,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:245,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:246,YYSEYRNIYAQTDESNLYLSYDFYTWAERAYEWY\nHLA-B07:247,YYSGYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:248,YYSEYRNIYAQTDESNLYIRSDYYTWAERAYEWY\nHLA-B07:249,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:25,YYSEYRNIYAQTDESNLYLSYDYYTWAVDAYEWY\nHLA-B07:250,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:252,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:253,YYSEYRNIYAQTDVNNLYLSYDYYTWAERAYEWY\nHLA-B07:254,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:255,YYSEYRNIYAQTDESNLYLRYDSYTWAERAYEWY\nHLA-B07:256,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:257,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:258,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHL
A-B07:259,YYSEYRNIYRQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:26,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYEWY\nHLA-B07:260,YYSEYRNIYAQTDESNLYLSYEHYTWAERAYEWY\nHLA-B07:261,YYSEYRNIYAQTDESNLYLSYDYYTWAVRAYEWY\nHLA-B07:262,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:263,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:264,YYSEHRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:265,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:266,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:267,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:268,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:269,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYQWY\nHLA-B07:27,YYSEYRNISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B07:270,YYSGYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:271,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:273,YYSEYRNIYAQTDESTLYLSYDYYTWAERAYEWY\nHLA-B07:274,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:275,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:276,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:277,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:278,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:279,YYSEYRNIYAQTDESNLYLSYDYYSLAERAYEWY\nHLA-B07:28,YYSEYRNIYAQTDESNLYLSYDDYTWAERAYEWY\nHLA-B07:280,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:281,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:282,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:283,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:284,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:286,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:287,YYSEYRNIYAQTDESNLYLSYNSYTWAERAYEWY\nHLA-B07:288,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:289,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:29,YDSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:290,YYSEYRIIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:291,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:292,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:293,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:294,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:295,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:296,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:297,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:298,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:299,YYSEYRNIYAQTDQSNLYLSYDYYTWAERAYEWY\nHLA-B07:30,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:300,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:301,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:302,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:303,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:304,YYSEYRNIYAQTDVSNLYLSYNYYTWAERAYEWY\nHLA-B07:305,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:306,YYSEYRNICTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B07:307,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:308,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:309,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:31,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYTWY\nHLA-B07:310,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:311,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:312,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:313,CYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:314,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:317,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:319,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:32,YYSEYRNIFTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:320,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:321,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:322,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:323,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:324,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:326,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:327,YYSEYRNIYAQTDESNLYLSYNFYTWAERAYEWY\nHLA-B07:328,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:329,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:33,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:331,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:332,Y
YSEYRNIYAQTDESNLYLSYNYYTWAARAYEWY\nHLA-B07:333,YYSQYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:334,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:335,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:336,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:337,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:338,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:339,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:34,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYTWY\nHLA-B07:340,YYSEYRNIYAQTDENNLYLSYNYYTWAERAYEWY\nHLA-B07:341,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:342,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:344,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:345,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:346,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:347,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:348,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:349,YYSEYRNIYAQTDESNLYLSYNYYTWAEDAYEWY\nHLA-B07:35,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:350,YYSEYRNIYAQTDESNLYISYDYYTWAERAYEWY\nHLA-B07:352,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:353,YYSEYRNIYAQTDESNLYLSYNYYAWAERAYEWY\nHLA-B07:354,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:355,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:356,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:357,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:358,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYLWY\nHLA-B07:36,YYSEYRNIYAQTDENIAYLSYDYYTWAERAYEWY\nHLA-B07:37,YYSEYRNIYANTYESNLYLSYDYYTWAERAYEWY\nHLA-B07:38,YYSEYRNIFTNTYENIAYLSYDYYTWAERAYEWY\nHLA-B07:39,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:40,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:41,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:42,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:43,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYTWY\nHLA-B07:44,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:45,YYSEYRNIYAQTDESNLYLSYDYYTWAERTYEWY\nHLA-B07:46,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:47,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:48,YYSEYRNIYAQTDESNLYLSYDYYTWAVLAYEWY\nHLA-B07:50,YYSEYRNISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B07:51,YYSEYRNIYAQTDESNLYLSYDYYTWAARAYEWY\nHLA-B07:52,YYSEYRNIYAQTDESNLYLSYDYYTWAERVYEWY\nHLA-B07:53,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:54,YYSEYREIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:55,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYEWY\nHLA-B07:56,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:57,YYSEYRNIYAQTDENNLYLSYDYYTWAERAYEWY\nHLA-B07:58,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:59,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:60,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B07:61,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:62,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:63,YYSDYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:64,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEGY\nHLA-B07:65,YYATYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:66,YYSEYRNIYAQTDESNLYLSYDYYTWAEQAYEWY\nHLA-B07:68,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:69,YYSEYRNICTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:70,YCSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:71,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:72,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:73,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:74,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:75,YYSEYRNIYAQTYENNLYLSYDYYTWAERAYEWY\nHLA-B07:76,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:77,YYSEYRNIYAQTDESNLYLRSDYYTWAERAYEWY\nHLA-B07:78,YYSEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B07:79,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:80,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYEWY\nHLA-B07:81,YYSEYRNIYAQTDESIAYLSYDYYTWAERAYEWY\nHLA-B07:82,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:83,YYSEYRNIFAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:84,YYSEYRNIYAQTDESNLYWTYDYYTWAERAYEWY\nHLA-B07:85,YYSEYRNICTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:86,YYSEYRN
IYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:87,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:88,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:89,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:90,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:91,YYSEYRNIYAQTYESNLYLSYDYYTWAERAYEWY\nHLA-B07:92,YYSEYRNIYAQTDVSNLYLSYDYYTWAERAYEWY\nHLA-B07:93,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:94,YYSEYWNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:95,YYSEYRNIYAQTDESNLYFSYDYYTWAERAYEWY\nHLA-B07:96,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B07:97,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B07:98,YYSEYRNIYAQTDESNLYLSYDYYTCAERAYEWY\nHLA-B07:99,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B0801,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0802,YDSEYRNIFTNTDENTAYLSYNYYTWAVDAYTWY\nHLA-B0803,YDSEYRNIFTNTYENIAYLSYNYYTWAVDAYTWY\nHLA-B0804,YDSEYRNISTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0805,YDSEYRNTFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0806,YDSEYRNIFTNTDENNLYLSYNYYTWAERAYTWY\nHLA-B0807,YDSEYRNIFTNTDESNLYLSYDYYTWAVDAYTWY\nHLA-B0808,YDSEYRNIFTNTDESNLYLSYNYYPGPWTPTRGT\nHLA-B0809,YDSEYRNIFTNTDESNLYWTYNYYTWAVDAYTWY\nHLA-B0810,YDSEYRDIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0811,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYAWY\nHLA-B0812,YDSEYRNIFTNTDESNLYLRYNYYTWAVDAYTWY\nHLA-B0813,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYTWY\nHLA-B0814,YDSEYRNIFTNTDESNLYLSYHDYTWAVDAYTWY\nHLA-B0815,YDSEYRNIFTNTDVSNLYLSYNYYTWAVDAYTWY\nHLA-B0816,YDSEYRNIFTNADESNLYLRYNYYTWAVDAYTWY\nHLA-B0817,YDSEYREISTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B0818,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0819,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0820,YDSEYRNIFTNTDESNLYLSYNYYTWAERAYTWY\nHLA-B0821,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYLWY\nHLA-B0822,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0823,YDSEYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B0824,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0825,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYLWY\nHLA-B0826,YYAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0827,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0828,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B0829,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0831,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0832,YDSTYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B0833,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:01,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:02,YDSEYRNIFTNTDENTAYLSYNYYTWAVDAYTWY\nHLA-B08:03,YDSEYRNIFTNTYENIAYLSYNYYTWAVDAYTWY\nHLA-B08:04,YDSEYRNISTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:05,YDSEYRNTFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:07,YDSEYRNIFTNTDESNLYLSYDYYTWAVDAYTWY\nHLA-B08:09,YDSEYRNIFTNTDESNLYWTYNYYTWAVDAYTWY\nHLA-B08:10,YDSEYRDIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:100,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:101,YDSEYREIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:102,YDTEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:103,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:104,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:105,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:106,YDSEYWNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:107,YDSEYRNIFTNTDESNLYLRYDSYTWAVDAYTWY\nHLA-B08:108,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:109,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:11,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYAWY\nHLA-B08:110,YDSEYRNISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B08:111,YDSEYRNIFTNTDESNLYLSYDYYTWAVDAYTWY\nHLA-B08:112,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:113,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:114,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:115,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:116,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:117,YDSEYRNIFTNTDESTAYLSYNYYTWAVDAYTWY\nHLA-B08:118,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWH\nHLA-B08:119,YDSEYGNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:12,YDSEYRNIFTNTDESNLYLRYNYYTWAVDAYTWY\nHLA-B08:120,Y
DSEYRNIFTNTDESNLYLSYNYYTLAVDAYTWY\nHLA-B08:121,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:122,YDTEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:123,YDSEYRNIYAQADESNLYLSYNYYTWAVDAYTWY\nHLA-B08:124,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:125,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYMWY\nHLA-B08:126,YDSEYRNIFTNTDEDTLYLSYNYYTWAVDAYTWY\nHLA-B08:127,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:128,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:129,YDSEYRNIFTNTDESNLYIRYNYYTWAVDAYTWY\nHLA-B08:13,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYTWY\nHLA-B08:130,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:131,YDSEYRNIFTNTDEDTLYLSYNYYTWAVDAYTWY\nHLA-B08:132,YDSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:133,YDSTYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B08:134,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:135,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:136,YDSEYRNIFTNTDESTLYLSYNYYTWAVDAYTWY\nHLA-B08:137,YDSEHRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:138,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:139,YDSEYRNIFTNTYEDTLYLSYNYYTWAVDAYTWY\nHLA-B08:14,YDSEYRNIFTNTDESNLYLSYHDYTWAVDAYTWY\nHLA-B08:140,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:141,YDSEYRNNFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:142,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:143,YDSEYRNIFTNTDESNLYLSYNYYTWAALAYTWY\nHLA-B08:144,YDSEYPNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:145,YDSEYRNIFTNADESNLYLSYNYYTWAVDAYTWY\nHLA-B08:146,YDSEYRNIYTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:147,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:149,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:15,YDSEYRNIFTNTDVSNLYLSYNYYTWAVDAYTWY\nHLA-B08:150,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:151,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:152,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:153,YDSEYRNIFTNTDESNPYLSYNYYTWAVDAYTWY\nHLA-B08:154,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:155,YDSKYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:156,YDSEYRNIFTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B08:157,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:158,YDSEYRNIFTNTEESNLYLSYNYYTWAVDAYTWY\nHLA-B08:159,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:16,YDSEYRNIFTNADESNLYLRYNYYTWAVDAYTWY\nHLA-B08:160,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:161,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:162,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:163,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:164,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:165,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYEWY\nHLA-B08:166,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:167,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:168,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:169,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:17,YDSEYREISTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B08:170,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:171,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYEWY\nHLA-B08:172,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:173,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:174,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:175,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYLWY\nHLA-B08:176,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:177,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:178,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:179,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:18,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:180,YDATYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:181,YDSEYRNIFTNTDESNLYIVYNYYTWAVDAYTWY\nHLA-B08:182,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:183,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:184,YDSEYRNIFTNTDESNLYLNYNYYTWAVDAYTWY\nHLA-B08:185,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:186,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYKWY\nHLA-B08:187,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:188,YDSGYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:189,YDSEYRNIFT
NTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:190,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:191,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:192,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:193,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:194,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:195,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:196,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:197,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:198,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:199,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:20,YDSEYRNIFTNTDESNLYLSYNYYTWAERAYTWY\nHLA-B08:200,YDSEYRNIFTNTDESNLYLSYNFYTWAVDAYTWY\nHLA-B08:201,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:202,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:203,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:204,YDSEYRNIFTNTDESNLYVSYNYYTWAVDAYTWY\nHLA-B08:205,YDSEYRNIFTNTDESNLYLSYNYYTLAVLAYEWY\nHLA-B08:206,YDSEYRNIFTNTDESNLYLSHNYYTWAVDAYTWY\nHLA-B08:207,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:208,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:209,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:21,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYLWY\nHLA-B08:210,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:211,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:212,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:213,YDSEYRNIFTNTDESNLYLSYNYYSWAVDAYTWY\nHLA-B08:216,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:217,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:218,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:219,YDSEYRNIFTITDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:22,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:221,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:222,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:223,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:224,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:23,YDSEYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B08:24,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:25,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYLWY\nHLA-B08:26,YYAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:27,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:28,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B08:29,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:31,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:32,YDSTYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:33,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:34,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:35,YDSEYRNIFTNTDESNLYLSYNSYTWAVDAYTWY\nHLA-B08:36,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:37,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B08:38,YDSEYREIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:39,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:40,YDSEYRNIFTNTDESNLYLSYNYYTWAVRAYEWY\nHLA-B08:41,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:42,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:43,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:44,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:45,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:46,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:47,YDSEYRNIFTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B08:48,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:49,YDSEYRNIFTNTDESNLYIRSNFYTWAVDAYTWY\nHLA-B08:50,YYSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:51,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:52,YDSEYRNIFTNTDESIAYLSYNYYTWAVDAYTWY\nHLA-B08:53,YDSEYRNIFTNTDESNLYLSYNYYTWAEDAYTWY\nHLA-B08:54,YDSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B08:55,YDSEYRNIFTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B08:56,YDAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:57,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:58,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:59,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:60,YDSEYRNIFTNTDESNLYISYNYYTWAVDAYTWY\nHLA-B08:61,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:62,YHSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:63,YDSEYRNI
FTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:64,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:65,YDSEYRNICTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:66,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:68,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:69,YDSEYRNIFTNTDESNLYLSYNFYTWAVDAYTWY\nHLA-B08:70,YDSEYRNIFTNTDESNLYLSYNYYTWAVWAYTWY\nHLA-B08:71,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:73,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:74,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:75,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:76,YDSEYRNIFTNTDESNLYISYNYYTWAVDAYTWY\nHLA-B08:77,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:78,YDSEYRNIFTNTDENIAYLSYNYYTWAVDAYTWY\nHLA-B08:79,YDSEYRNIFTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B08:80,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:81,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:83,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:84,YDSEYRNIFTNTDESNLYWTYNYYTRAVDAYTWY\nHLA-B08:85,YDSEYRNIFSNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:87,YDSEYRNIFTNTDESNLYLSYDDYTWAVDAYTWY\nHLA-B08:88,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:89,YDSEYRNIFTNTDESNLYLRYDSYTWAVDAYTWY\nHLA-B08:90,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:91,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:92,YDSEYRNIFTNTDESNLCLSYNYYTWAVDAYTWY\nHLA-B08:93,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:94,YYSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:95,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:96,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:97,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:98,YDSEYQNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B08:99,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B1301,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B1302,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B1303,YYTMYREISTNTYENTAYWTYNLYTWAVLAYLWY\nHLA-B1304,YYTMYREISTNTYENTAYWTYDSYTWAVLAYLWY\nHLA-B1306,YYTMYREISTNTYENTAYIRYNLYTWAVLAYTWY\nHLA-B1308,YYTMYREISTNTYENTAYWTYNLYTWAVLACEWY\nHLA-B1309,YYTMYREISTNTYESNLYWTYNLYTWAVLAYEWY\nHLA-B1310,YYTMYREISTNTYENTAYLRYDSYTWAVLAYEWY\nHLA-B1311,YYTMYREISTNTYENTAYLRYNLYTWAVLAYEWY\nHLA-B1312,YYTMYREISTNTYENTAYIRYNLYTWAVLAYGWY\nHLA-B1313,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B1314,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B1315,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWH\nHLA-B1316,YYTMYREISTNTYENTAYWTYNLYTWAELAYEWY\nHLA-B1317,YYAMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B1318,YYTMYREISTNTYENTAYWTYNLYTWAVRAYEWY\nHLA-B1319,YYTMYREVSTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B1320,YYTMYREISTNTYENTAYIRYNLYTWAELAYEWY\nHLA-B13:01,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:02,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:03,YYTMYREISTNTYENTAYWTYNLYTWAVLAYLWY\nHLA-B13:04,YYTMYREISTNTYENTAYWTYDSYTWAVLAYLWY\nHLA-B13:06,YYTMYREISTNTYENTAYIRYNLYTWAVLAYTWY\nHLA-B13:08,YYTMYREISTNTYENTAYWTYNLYTWAVLACEWY\nHLA-B13:09,YYTMYREISTNTYESNLYWTYNLYTWAVLAYEWY\nHLA-B13:10,YYTMYREISTNTYENTAYLRYDSYTWAVLAYEWY\nHLA-B13:100,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:101,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:102,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:104,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:105,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:106,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:107,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:108,YYTMYREISTNTYENTAYLSYNYYTWAVLAYEWY\nHLA-B13:109,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:11,YYTMYREISTNTYENTAYLRYNLYTWAVLAYEWY\nHLA-B13:110,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:111,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:112,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:113,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:114,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:115,YYTMYREISSNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:117,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-
B13:118,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:119,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:12,YYTMYREISTNTYENTAYIRYNLYTWAVLAYGWY\nHLA-B13:120,YYTMYREISTNTYENTAYWTYNLYTWAVLAYLWY\nHLA-B13:121,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:122,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:124,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:125,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:126,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:127,YYTMYREISTNTYENTAYWTYDSYTWAVLAYEWY\nHLA-B13:128,YYTMYREISTNTYENTAYWTYNLYTWAERAYEWY\nHLA-B13:129,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:13,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B13:130,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:14,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:15,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWH\nHLA-B13:16,YYTMYREISTNTYENTAYWTYNLYTWAELAYEWY\nHLA-B13:17,YYAMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:18,YYTMYREISTNTYENTAYWTYNLYTWAVRAYEWY\nHLA-B13:19,YYTMYREVSTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:20,YYTMYREISTNTYENTAYIRYNLYTWAELAYEWY\nHLA-B13:21,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B13:22,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:23,YHTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:25,YYTMYREISTNTYESTAYIRYNLYTWAVLAYEWY\nHLA-B13:26,YYTMYREISTNTYENTAYIRYDSYTWAVLAYEWY\nHLA-B13:27,YYTMYREISTNTYENTAYWTFNLYTWAVLAYEWY\nHLA-B13:28,YYTMYREISTNTYENTACIRYNLYTWAVLAYEWY\nHLA-B13:29,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:30,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:31,YYTMYREISTNTYENTAYWTYNLYTWAEWAYEWY\nHLA-B13:32,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:33,YYAMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:34,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:35,YYTMYREISTNTYENTAYWTYDYYTWAVLAYEWY\nHLA-B13:36,YYTMYRNISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:37,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:38,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:39,YYTMYREISTNTYENNLYIRYNLYTWAVLAYEWY\nHLA-B13:40,YYTMYREISTNTYENTAYWTYNLYTWTVLAYEWY\nHLA-B13:41,YYTMYREISTNTYENTAYWTYNLYTWAVWAYEWY\nHLA-B13:42,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:43,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:44,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:45,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:46,YYTMYREISTNTYENTAYWTYNFYTWAVDAYTWY\nHLA-B13:47,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:48,YYTMYREISTNTYENTAYWTYNLYTWAELAYLWY\nHLA-B13:50,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:51,YYTMYREISTNTYENTAYIRYNFYTWAVLAYEWY\nHLA-B13:52,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:53,YYTMYREISTNTYENTAYWTYNLYTWAVLAYTWY\nHLA-B13:54,YYTMYREISANTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:55,YHTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:57,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:58,YYTTYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:59,YYTMYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B13:60,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:61,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:62,YYTMYREISTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B13:64,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:65,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:66,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:67,YYTMYREISTNTYENNLYWTYNLYTWAVLAYEWY\nHLA-B13:68,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:69,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:70,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:71,YYTMYREISTNTYENTAYLSYNYYTWAVLAYEWY\nHLA-B13:72,YYTMYREISTNTYENTAYWTYDSYTWAVLAYEWY\nHLA-B13:73,YYTMYREISTNTYENTAYIRYNLYTWAVRAYEWY\nHLA-B13:74,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:75,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:77,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:78,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:79,YYTMYREISTNTYENTAYIRSNFYTWAVLAYEWY\nHLA-B13:80,YYTMYREISTN
TYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:81,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:82,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:83,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:84,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:85,YYTMYREISTDTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:86,YYTMYREISTNTYENTAYIRYNYYTWAVLAYLWY\nHLA-B13:87,YYTMDREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:88,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:89,YYTMYREISTNTYENTAYWTYNLYTWAVLTYEWY\nHLA-B13:90,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:91,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:92,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:93,YDTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:94,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:95,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B13:96,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:97,YYTEYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:98,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B13:99,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B1401,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B1402,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B1403,YYSEYRNICTNTDESNLYLWYNFYTWAERAYTWH\nHLA-B1404,HYSEYRNNCTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B1405,YYSEYRNICTNTDESNLYLSYNFYTWAELAYTWH\nHLA-B1406,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B14:01,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:02,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:03,YYSEYRNICTNTDESNLYLWYNFYTWAERAYTWH\nHLA-B14:04,HYSEYRNNCTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:05,YYSEYRNICTNTDESNLYLSYNFYTWAELAYTWH\nHLA-B14:06,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B14:08,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B14:09,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:10,YYSEYRNICTNTDESNLYIRYNFYTWAELAYTWH\nHLA-B14:11,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:12,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:13,YYSEYRNICTNTDESNLYLSYNYYTWAELAYTWH\nHLA-B14:14,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B14:15,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:16,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:17,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:18,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B14:19,YYSEYRNICTNTDESNLYLWYNFYTWAELAYLWH\nHLA-B14:20,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:21,YYSEYRNIYAQTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:22,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:23,YYSEYRNICTNTDESNLYLWYDYYTWAELAYTWH\nHLA-B14:24,YYSEYGNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:25,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:26,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:27,YYSEYRNICTNTDESNLYLWYNFYTWAELAYLWH\nHLA-B14:28,YDSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:29,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:30,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:31,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:32,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:33,YYSEYRNICTNTDESNLYLWYDYYTWAELAYTWH\nHLA-B14:34,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:35,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:36,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:37,YYSEYRNICTNTDESNLYWRYNLYTWAELAYTWH\nHLA-B14:38,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:39,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:40,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:42,YYSEYRNIFTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:43,YYSEYRNICTNTDESNLYLWYNFYTWAVLAYTWH\nHLA-B14:44,YYSEYRNICTNTDESNLYLWYNFYTWAELAYAWH\nHLA-B14:45,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:46,YYSEYRNICTNTDESNLYLWYNFYTWAELAYEWY\nHLA-B14:47,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:48,YYTEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:49,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:50,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:51,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:52,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:53,
YYSEYRNICTNTDESNLYLSYNYYTWAELAYTWH\nHLA-B14:54,YYAEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:55,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B14:56,YYSEYRNICTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B14:57,YYSEYRNICAKTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:58,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:59,YYSEYRNICTNTDESNLYLWYNFYTWAVLAYTWH\nHLA-B14:60,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:61,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:62,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWY\nHLA-B14:63,YYSEYRNICTNTDVSNLYLWYNFYTWAELAYTWH\nHLA-B14:64,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:65,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:66,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:67,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B14:68,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B1501,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1502,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B1503,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B1504,YYAMYREISTNTYESNLYWTYDSYTWAEWAYLWY\nHLA-B1505,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B1506,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B1507,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B1508,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1509,YYSEYRNICTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B1510,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B1511,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1512,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B1513,YYAMYRNISTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B1514,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLSY\nHLA-B1515,YYAMYRNISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1516,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B1517,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B1518,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B1519,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B1520,YYAMYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B1521,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B1523,YYSEYRNICTNTYENIAYLRYDSYTWAELAYLWY\nHLA-B1524,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B1525,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B1527,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B1528,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1529,YYSEYRNIFTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B1530,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B1531,YYAMYRNISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B1532,YYAMYREISTNTYESNLYLRSDSYTWAEWAYLWY\nHLA-B1533,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1534,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1535,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B1536,YYAMYREISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B1537,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWH\nHLA-B1538,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWH\nHLA-B1539,YYAMYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B1540,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B1542,YYAMYREISTNTYESNLYWTYNLYTWAELAYTWY\nHLA-B1543,YYAMYREISTNTYEDTLYLRYDSYTWAEWAYLWY\nHLA-B1544,YYAMYRNICTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B1545,YYAMYREISTNTYESNLYLSYDYYTWAEWAYLWY\nHLA-B1546,YYAKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1547,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B1548,YYAMYREISTNTYESNLYLRYNYYTWAVLTYLWY\nHLA-B1549,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B1550,YYAMYREISTNTYESNLYLRYDSYTWAEWAYTWY\nHLA-B1551,YYSEYRNICTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B1552,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B1553,YYTKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1554,YYSEYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1555,YYAMYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B1556,YYAMYREIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1557,YYAMYREISTNTYVNNLYLRYDSYTWAEWAYLWY\nHLA-B1558,YYAMYREISTNTYESNLYLRYNFYTWAEWAYLWY\nHLA-B1560,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1561,YYSEYREISTNTDESNLYLRYDSYTWAELAYLWY\nHLA-B1562,YYSEYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B1563,YYAMYREISTNTYESNLYLRYDYYTWAEWAYLWY\nHLA-B1564,YYSEYRNISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B1565,YYAMYREISTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B156
6,YYAMYREICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1567,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B1568,YYSEYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B1569,YYSEYREISTNTYESNLYLRYDSYTWAELTYTWY\nHLA-B1570,YYAMYREISTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B1571,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1572,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B1573,YYAMYREISTNTYESNLYLRYNLYTWAEWAYLWY\nHLA-B1574,YYSEYREISINTYESNLYLRYDSYTWAELAYLWY\nHLA-B1575,YYAMYREISTNTYESNLYLRYDSYTWAQWAYLWY\nHLA-B1576,YYAMYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B1577,YYAMYREISTNTYESNLYIRYDDYTWAEWAYLWY\nHLA-B1578,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1580,YYSEYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B1581,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1582,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1583,YYAMYREISTNTYESNLYWTYNYYTWAVDAYTWY\nHLA-B1584,YYAMYREISTNTYESNLYLRFDSYTWAVRAYLWY\nHLA-B1585,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B1586,YYAMYREISTNTYESNLYLRYNLYTWAVLAYTWY\nHLA-B1587,YYAMYREISTNTYESIAYLRYDSYTWAEWAYLWY\nHLA-B1588,YYAMYRNISTNTYESNLYIRYDSYTWATLAYLWY\nHLA-B1589,YYAMYRNISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B1590,YYSEYRNICTNTYESNLYLRYDYYTWAELVYLWY\nHLA-B1591,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLSY\nHLA-B1592,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1593,YYSEYRNICTNTYESNLYLRYDSYTWAELAYTWY\nHLA-B1595,YYAMYQENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B1596,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1597,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B1598,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B1599,YYSEYRNICTNTYESNLYLRYDYYTWAERAYLWY\nHLA-B15:01,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:02,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:03,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:04,YYAMYREISTNTYESNLYWTYDSYTWAEWAYLWY\nHLA-B15:05,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B15:06,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B15:07,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:08,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:09,YYSEYRNICTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B15:10,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B15:101,YYAMYREIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B15:102,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:103,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:104,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:105,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:106,YYAKYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:107,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B15:108,YYSEYRNICTNTYESNLYLRYDSYTWAELTYLWY\nHLA-B15:109,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B15:11,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:110,YYAMYREISTNTYESNLYLRCDSYTWAEWAYLWY\nHLA-B15:112,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:113,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:114,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B15:115,YYSEYRNICTNTYESTAYLRYDSYTWAELAYLWY\nHLA-B15:116,YYAMYREISTNTYESNLYLRYDSYSLAEWAYLWY\nHLA-B15:117,YYAMYREISTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B15:118,YYAMYREISTNTYESNLYLMYDSYTWAEWAYLWY\nHLA-B15:119,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:12,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B15:120,YYAMYRDISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:121,YYAMYRNISTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B15:122,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:123,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B15:124,YYSEYRNICTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B15:125,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:126,YYAMYREISTNTYESNLYLSYDSCTWAEWAYLWY\nHLA-B15:127,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:128,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:129,YYAMYREISTNTYESNLYLNYDSYTWAEWAYLWY\nHLA-B15:13,YYAMYRNISTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B15:131,YYSEYREISTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B15:132,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:133,YYSEYRNICTNTYESNLYLRYDFYTWAE
LAYLWY\nHLA-B15:134,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:135,YYAMYREISTNTYENNLYLRYDSYTWAEWAYLWY\nHLA-B15:136,YYAMYREISTNTYESNLYLRYDSYTWAVLTYLWY\nHLA-B15:137,YYAMYREISTNTYESNLYWTYNFYTWAEWAYLWY\nHLA-B15:138,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B15:139,YYAMYRNISANTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:14,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLSY\nHLA-B15:140,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:141,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B15:142,YDAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:143,YYAKYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:144,YYAMYRNISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B15:145,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:146,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:147,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:148,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:15,YYAMYRNISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:150,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B15:151,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B15:152,YYAMYREIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:153,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:154,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B15:155,YYAMYREISTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B15:156,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:157,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B15:158,YYSEYREISTNTYESNLFLRYDSYTWAELAYLWY\nHLA-B15:159,YYAMYREISTNTYESNLHLRYDSYTWAEWAYLWY\nHLA-B15:16,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B15:160,YYAMHREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:161,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B15:162,YYAMYRENMASTYENIAYLRYHDYTWAALAYLWY\nHLA-B15:163,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:164,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:165,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B15:166,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:167,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:168,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:169,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:17,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:170,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:171,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:172,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:173,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:174,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:175,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:176,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:177,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:178,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:179,YYAMYREISTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B15:18,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:180,YDSEYRNIFTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B15:183,YYTMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:184,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:185,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B15:186,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B15:187,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:188,YYAMYREISTNTYESNLYLRYNYYTWAVLAYTWY\nHLA-B15:189,YYAMYRNICTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B15:19,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B15:191,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:192,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:193,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:194,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:195,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B15:196,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:197,YYSEYRNICTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B15:198,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:199,YYAMYREISTNTYESNLYLRYDSYTWAEDAYTWY\nHLA-B15:20,YYAMYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B15:200,YYSEYRNICTNTYESNLYLRYDSYTWATLAYLWY\nHLA-B15:201,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:202,YYATYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:203,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:204,YHAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHL
A-B15:205,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:206,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:207,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:208,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:21,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:210,YHSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:211,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:212,YHTKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:213,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:214,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:215,YYAMYRNIYTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:216,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:217,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:219,YYAMYREISTNTYESNLYLRYGSYTWAELAYLWY\nHLA-B15:220,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:221,YYSEYRNICTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B15:222,YYAMYRENMASTYENIAYWRYDSYTWAVLAYLWY\nHLA-B15:223,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:224,YYAMYREISTNTYESNLYLRYNLYTWAVLAYTWY\nHLA-B15:225,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:227,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:228,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:229,YYSEYRNICTNTYESNLYLRYNFYTWAELAYLWY\nHLA-B15:23,YYSEYRNICTNTYENIAYLRYDSYTWAELAYLWY\nHLA-B15:230,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:231,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:232,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:233,YYAMYREISTNTYESNLYLRYDDYTWAEWAYLWY\nHLA-B15:234,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:235,YYSEYREISTNTYESNLYLRYDSYTWAVLTYLWY\nHLA-B15:236,YYAMYREIFTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B15:237,YYAMYREISTNTYESNLYLRYDSYTWAVRAYLWY\nHLA-B15:238,YYSEYRNICTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B15:239,YYATYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:24,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B15:240,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:241,YYAMYREISTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B15:242,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:243,YYSEYREICTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B15:244,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:247,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:248,YYAMYREISTNTYESNLYLRYDYYTWAEWAYLWY\nHLA-B15:249,YYAMYREISTNTYESNLYLRYDSYTWAEWTYLWY\nHLA-B15:25,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:250,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:251,YYSMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:252,YYSEYRNICTNTYESNLYLRYNYYTWAELAYTWY\nHLA-B15:253,YYSEYREISTNTYESNLYLRYDFYTWAELAYLWY\nHLA-B15:254,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B15:255,YYSEYREIYAQTDESNLYLRYDSYTWAELAYLWY\nHLA-B15:256,YYAMYREISTNTYENTAYLRYDSYTWAEWAYLWY\nHLA-B15:257,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:259,YYAEYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:260,YYAMYREISTNTYESNLHLRYDSYTWAEWAYLWY\nHLA-B15:261,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:263,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:264,YYAMYREISTNTYESNLYLGYDSYTWAEWAYLWY\nHLA-B15:265,YYAMYRNISTNTYESNLYIRYDSYTWAELTYTWY\nHLA-B15:266,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:267,YYAMYREISTNTNESNLYLRYDSYTWAEWAYLWY\nHLA-B15:268,YYAMYRENMASTYENIAYLRYDSYTWAELAYLWY\nHLA-B15:269,YYAMYREISTNTYESNLYLRYDDYTWAEWAYLWY\nHLA-B15:27,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B15:270,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B15:271,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:273,YYAMYRENMASTYENIAYLRYHDYTWAELAHLWY\nHLA-B15:274,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:275,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B15:276,YYAMYREISTNTYESNLYLRYDSYTRAEWAYLWY\nHLA-B15:277,YYAMYREISTNTYESKLYLRYDSYTWAEWAYLWY\nHLA-B15:278,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:279,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:28,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:280,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:281
,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:282,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:283,YYAMYRNISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B15:284,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:285,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:286,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:287,YYAMYREISTNTYESNLYLRYDYYTWAEWAYLWY\nHLA-B15:288,YYAMYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:289,YYAMYREISTNTYVSNLYIRYDSYTWAELAYLWY\nHLA-B15:29,YYSEYRNIFTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:290,YYSEYRNICTNTYESNLYLTYDSYTWAELAYLWY\nHLA-B15:291,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:292,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:293,YYSEYRNICTNTYESNLYWRYDSYTWAELAYLWY\nHLA-B15:295,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:296,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:297,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:298,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGC\nHLA-B15:299,YYAMYREISTNTYESNLYLRSDSYTWAEWAYLWY\nHLA-B15:30,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B15:300,YYAMYREISTNTYESNLYLRYDSYTWAECAYLWY\nHLA-B15:301,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:303,YYAMYRNIYTNTYESNLYWTYNLYTWAEWAYLWY\nHLA-B15:305,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:306,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:307,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:308,YYATYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:309,YYAMYREISTNTYESNLYLRYDSNTWAEWAYLWY\nHLA-B15:31,YYAMYRNISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B15:310,YYAMYREISTNTYESNLYWTYDSYTWAELAYLWY\nHLA-B15:311,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:312,YYSEYRNICTNTYESNLYLRYDYYTWAQLAYLWY\nHLA-B15:313,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B15:314,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:315,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:316,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:317,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:318,YYAMYREISTNTYVSNLYLRYDSYTWAEWAYLWY\nHLA-B15:319,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:32,YYAMYREISTNTYESNLYLRSDSYTWAEWAYLWY\nHLA-B15:320,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:322,YYAMYREISTNTYESNLYLRYDSYTWAEGAYLWY\nHLA-B15:323,YYSEYRNICTNTYESNLYLRYDYYTWAELAYTWH\nHLA-B15:324,YYAMYREISTNTYESNLYLSYNYYTWAEWAYLWY\nHLA-B15:325,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:326,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:327,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B15:328,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:329,YYSEYRNICTNTYESNLYLSYNYYTWAELAYLWY\nHLA-B15:33,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:330,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:331,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:332,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B15:333,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:334,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:335,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWH\nHLA-B15:336,YYTMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:337,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:338,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B15:339,YYAMYREISTNTYENTAYLRYHDYTWAELAYLWY\nHLA-B15:34,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:340,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:341,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:342,YYAMYREISTNTYESNLYLNYHDYTWAEWAYLWY\nHLA-B15:343,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:344,YHAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B15:345,YYTMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:346,YYAMYREISTNTYESNLYLRYDSYIWAEWAYLWY\nHLA-B15:347,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:348,YYAMYREISTNTYESNLYLRYDSYTWAVLAYTWY\nHLA-B15:349,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:35,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B15:350,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:351,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:352,YYAMYREIS
TNTYESNLYLRYDSYSWAEWAYLWY\nHLA-B15:353,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:354,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:355,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:356,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:357,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:358,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:359,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:36,YYAMYREISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B15:360,YYAMYREISTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B15:361,YYAMYRENMASTYENIAYLRYNYYTWAELAYLWY\nHLA-B15:362,YYAMYRENMASTYENIAYWRYDSYTWAELDYLWY\nHLA-B15:363,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:364,YYAMYREISTNTYESNLYLRYDSYTWAELAYLWH\nHLA-B15:365,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:366,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:367,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:368,YYAMYREISTNTYESNLYLRYDSYTWAEWAYTWH\nHLA-B15:369,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:37,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWH\nHLA-B15:370,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:371,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:372,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:373,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:374,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:376,YYSTYREICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:378,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:379,YYSEYREISTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B15:38,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWH\nHLA-B15:381,YHAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B15:382,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:383,YYAMYREISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B15:384,YYAMYRNISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:385,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:386,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:387,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:388,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:389,YYSEYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B15:39,YYAMYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:390,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:391,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:392,YYAMYRGISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:393,YYAMYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B15:394,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:395,YYAMYREISTNTYESNLYLNYDSYTWAELAYLWY\nHLA-B15:396,YYAMYRENMASTYENIAYLRYHDYTWAVLAYLWY\nHLA-B15:397,YYSEYREISTNTDVSNLYLRYDSYTWAELAYLWY\nHLA-B15:398,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B15:399,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:40,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B15:401,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:402,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:403,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:404,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:405,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:406,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:407,YYAMYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B15:408,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B15:409,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:410,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:411,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:412,YYAMYREISTNTYESNLYLRYDSYSWAEWAYLWY\nHLA-B15:413,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:414,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:415,YYAMYREISTNTYESNLYLRYDSYTWAVWAYLWY\nHLA-B15:416,YYAMYREISTNTYESNLYLRYESYTWAEWAYLWY\nHLA-B15:417,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:418,YYAMYRNISTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B15:419,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:42,YYAMYREISTNTYESNLYWTYNLYTWAELAYTWY\nHLA-B15:420,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:421,YYAMYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B15:422,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:423,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:424,YYAMYRENMASTYENIAY
LRYHDYTWAELAYLWY\nHLA-B15:425,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:426,YHSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:427,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B15:428,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:429,YDSMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:43,YYAMYREISTNTYEDTLYLRYDSYTWAEWAYLWY\nHLA-B15:430,YYAMYREISTNTYESKLYLRYDSYTWAEWAYLWY\nHLA-B15:431,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:432,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:433,YYSEYRNICTNTYESNLDLRYDSYTWAELAYLWY\nHLA-B15:434,YYSGYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:435,YYSEYRNICTNTYESNLYLRYNYYTWAELAYLWH\nHLA-B15:436,YYSGYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:437,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:438,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:439,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B15:44,YYAMYRNICTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B15:440,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:441,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:442,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B15:443,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:444,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:445,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B15:446,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:447,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:448,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:449,YYAMYREISTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B15:45,YYAMYREISTNTYESNLYLSYDYYTWAEWAYLWY\nHLA-B15:450,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:451,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:452,YYAMYRELSTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:453,YYAMYREISTNTYESNLYLRYNSYTWAEWAYLWY\nHLA-B15:455,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B15:456,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:457,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:458,CYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:459,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:46,YYAKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:460,YYAMYRNISTNTYESNLYIRYNFYTWAELAYLWY\nHLA-B15:461,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:462,YYAMYGENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B15:464,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:465,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:466,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:467,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:468,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:469,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:47,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B15:470,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:471,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:472,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:473,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:474,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:475,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:476,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:477,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:478,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:479,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:48,YYAMYREISTNTYESNLYLRYNYYTWAVLTYLWY\nHLA-B15:480,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:481,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:482,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:484,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:485,YYAMYREISTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B15:486,YYAMYRNIYTNTYESNLYLRYNLYTWAEWAYLWY\nHLA-B15:488,YYAMYREISTNTYESNLYLRYNFYTWAEWAYLWY\nHLA-B15:489,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:49,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B15:490,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:491,YYAMYRNISTNTYESNLHIRYDSYTWAELAYLWY\nHLA-B15:492,YYAMYPEISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:493,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:494,YYAMYREISTNTYESNLYLRYDSYTWAELTYTWY\nHLA-B15:495,YYAMYRNISTNTYESNLYIRYDSYTWAE
LAYLWY\nHLA-B15:497,YYAMYREISTNTYESNLYLRYDSYTWAELAYTWY\nHLA-B15:498,YYSEYRNICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:50,YYAMYREISTNTYESNLYLRYDSYTWAEWAYTWY\nHLA-B15:51,YYSEYRNICTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B15:52,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B15:53,YYTKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:54,YYSEYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:55,YYAMYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B15:56,YYAMYREIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:57,YYAMYREISTNTYVNNLYLRYDSYTWAEWAYLWY\nHLA-B15:58,YYAMYREISTNTYESNLYLRYNFYTWAEWAYLWY\nHLA-B15:60,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:61,YYSEYREISTNTDESNLYLRYDSYTWAELAYLWY\nHLA-B15:62,YYSEYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:63,YYAMYREISTNTYESNLYLRYDYYTWAEWAYLWY\nHLA-B15:64,YYSEYRNISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:65,YYAMYREISTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B15:66,YYAMYREICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:67,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B15:68,YYSEYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B15:69,YYSEYREISTNTYESNLYLRYDSYTWAELTYTWY\nHLA-B15:70,YYAMYREISTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B15:71,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:72,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:73,YYAMYREISTNTYESNLYLRYNLYTWAEWAYLWY\nHLA-B15:74,YYSEYREISINTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:75,YYAMYREISTNTYESNLYLRYDSYTWAQWAYLWY\nHLA-B15:76,YYAMYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B15:77,YYAMYREISTNTYESNLYIRYDDYTWAEWAYLWY\nHLA-B15:78,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:80,YYSEYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B15:81,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:82,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:83,YYAMYREISTNTYESNLYWTYNYYTWAVDAYTWY\nHLA-B15:84,YYAMYREISTNTYESNLYLRFDSYTWAVRAYLWY\nHLA-B15:85,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B15:86,YYAMYREISTNTYESNLYLRYNLYTWAVLAYTWY\nHLA-B15:87,YYAMYREISTNTYESIAYLRYDSYTWAEWAYLWY\nHLA-B15:88,YYAMYRNISTNTYESNLYIRYDSYTWATLAYLWY\nHLA-B15:89,YYAMYRNISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B15:90,YYSEYRNICTNTYESNLYLRYDYYTWAELVYLWY\nHLA-B15:91,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLSY\nHLA-B15:92,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:93,YYSEYRNICTNTYESNLYLRYDSYTWAELAYTWY\nHLA-B15:95,YYAMYQENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B15:96,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:97,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B15:98,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B15:99,YYSEYRNICTNTYESNLYLRYDYYTWAERAYLWY\nHLA-B1801,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1802,YHSTYRNISTNTYESNLYLNYDSYTWAVLAYTWH\nHLA-B1803,YHSTYRNISTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B1804,YYATYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1805,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1806,YHSTYRNISTNTYVSNLYLRYDSYTWAVLAYTWH\nHLA-B1807,YHSTYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1808,YHSTYRNISTNTYESNLYLRCDSYTWAVLAYTWH\nHLA-B1809,YHSTYRNISTNTYENTAYLRYDSYTWAVLAYTWH\nHLA-B1810,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B1811,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWY\nHLA-B1812,YHSTYREISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1813,YHSTYRNISTNTYESNLYLRYDSYTWAVRAYTWH\nHLA-B1814,YHSTYRNISTNTYESNLYLSYDSYTWAVLAYTWH\nHLA-B1815,YHSTYRNISTNTYESNLYLRYDSYTWAELAYTWH\nHLA-B1818,YHSTYRNISTNTYESNLYLRSDSYTWAVLAYTWH\nHLA-B1819,YHSTYRNISTNTYESNLYLRYDSYTWAEWAYTWH\nHLA-B1820,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1821,YHSTYRNISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B1822,YHSTYRNISTNTYESNLYISYDSYTWAVLAYTWH\nHLA-B1823,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1824,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1825,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B1826,YHSTYRNISTNTYESNLYLRYNYYTWAVLAYTWH\nHLA-B18:01,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:02,YHSTYRNISTNTYESNLYLNYDSYTWAVLAYTWH\nHLA-B18:03,YHSTYRNISTNTDESNLYLRYDSYTWAVLAYTW
H\nHLA-B18:04,YYATYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:05,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:06,YHSTYRNISTNTYVSNLYLRYDSYTWAVLAYTWH\nHLA-B18:07,YHSTYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:08,YHSTYRNISTNTYESNLYLRCDSYTWAVLAYTWH\nHLA-B18:09,YHSTYRNISTNTYENTAYLRYDSYTWAVLAYTWH\nHLA-B18:10,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B18:100,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:101,YHSTYRNISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B18:102,YHTTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:103,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:104,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:105,YHSTYRNISTNTYESNLYIRYDSYTWAVLAYTWH\nHLA-B18:106,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:107,YYTKYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:108,YHSTYRNISTNTYEDNLYLRYDSYTWAVLAYTWH\nHLA-B18:109,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:11,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWY\nHLA-B18:110,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWY\nHLA-B18:111,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:112,YHLTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:113,YHSTYRNISTNTYESNLYLRYDSYTWAVDAYTWH\nHLA-B18:114,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:115,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:116,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:117,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:118,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:119,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:12,YHSTYREISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:120,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:121,YHSTYRNISTNTYESNLYLRYDSYTWAVLDYTWH\nHLA-B18:122,YHSTYRNISTNTYESNLYLRYNFYTWAVLAYTWH\nHLA-B18:123,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:124,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:125,YYAEYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:126,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:127,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:128,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:129,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:13,YHSTYRNISTNTYESNLYLRYDSYTWAVRAYTWH\nHLA-B18:130,YHSTYRNISTNIYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:131,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:132,YHSTYRNISTNTDEDNLYLRYDSYTWAVLAYTWH\nHLA-B18:133,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:134,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:135,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:136,YHSTYRNIFTNTYENIAYLRYDSYTWAVLAYTWH\nHLA-B18:137,YHSTYRNNSTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:139,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:14,YHSTYRNISTNTYESNLYLSYDSYTWAVLAYTWH\nHLA-B18:140,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:141,YHSTYRNISTNTYESNLYLRHDSYTWAVLAYTWH\nHLA-B18:142,YHSTYRNISTNTDVNNLYLRYDSYTWAVLAYTWH\nHLA-B18:143,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:144,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:145,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:146,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:147,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:148,YHSTYRNISTNTYESNLYLRYDFYTWAVLAYTWH\nHLA-B18:149,YHSTYRNISTNTYESNLYIRYDSYTWAVLAYTWH\nHLA-B18:15,YHSTYRNISTNTYESNLYLRYDSYTWAELAYTWH\nHLA-B18:150,YHSTYRNISTNTYESNLYIRFDSYTWAVLAYTWH\nHLA-B18:151,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:152,YHSTYRNISTNTDESNLYLRYDSYTWAELAYTWH\nHLA-B18:153,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:155,YHSTYRNISTSTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:156,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:157,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:158,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:159,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:160,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:161,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:18,YHSTYRNISTNTYESNLYLRSDSYTWAVLAYTWH\nHLA-B18:19,YHSTYRNISTNTYESNLYLRYDSYTWAEWAYTWH\nHLA-B18:20,YHS
TYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:21,YHSTYRNISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B18:22,YHSTYRNISTNTYESNLYISYDSYTWAVLAYTWH\nHLA-B18:24,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:25,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:26,YHSTYRNISTNTYESNLYLRYNYYTWAVLAYTWH\nHLA-B18:27,YHSTYRNISTNTYESNLYLMFDSYTWAVLAYTWH\nHLA-B18:28,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:29,YHATYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:30,YHSTYRNISTNTYESNLYLRYDSYTWAERAYTWH\nHLA-B18:31,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:32,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:33,YHSTYRNICTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:34,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:35,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B18:36,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B18:37,YHSEYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:38,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:39,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:40,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:41,YHSTYRNISTNTYESNLYLRYESYTWAVLAYTWH\nHLA-B18:42,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:43,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:44,YHSTYRNISTNTYESNLYLWYDSYTWAVLAYTWH\nHLA-B18:45,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:46,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:47,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:48,YHSKYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:49,YHSTYRNISTNTYENNLYLRYDSYTWAVLAYTWH\nHLA-B18:50,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWH\nHLA-B18:51,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:52,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:53,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:54,YHSTYRNISTNTYEDTLYLRYDSYTWAVLAYTWH\nHLA-B18:55,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:56,YHSTYRNISTNTYESNLYLRYDSYTWAVDAYLSY\nHLA-B18:57,YHSTYRNISTNTYESNLYLRYDSYTWAELAYLWH\nHLA-B18:58,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B18:59,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:60,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:61,YHSTYRNISTNTYESNLYLRYNYYTWAVLAYTWH\nHLA-B18:62,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:63,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:64,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:65,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:66,YHSMYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:67,YHSTYRNISTNTYESIAYLRYDSYTWAVLAYTWH\nHLA-B18:68,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:69,YHSTYRNISTNTYESNLYIRYDSYTWAVLAYTWH\nHLA-B18:70,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:71,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:72,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:73,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:75,YHSTYRNISTNTYESNLYLRYDSYAWAVLAYTWH\nHLA-B18:76,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:77,YHSTYPNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:78,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:79,YHSTYRNIFTNTYENNLYLRYDSYTWAVLAYTWH\nHLA-B18:80,YHTTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:81,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:82,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:83,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:84,YHSTYRNISTNTYESNLCLRYDSYTWAVLAYTWH\nHLA-B18:85,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:86,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYMWH\nHLA-B18:87,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:88,YHSTYRNISTNTYESNLYLRYDSYTLAVLAYTWH\nHLA-B18:89,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:90,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:91,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:92,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:93,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:95,YHSTYRNISTNTYESNLHLRYDSYTWAVLAYTWH\nHLA-B18:96,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B18:97,YHSTYRNISTNTYESNLYLRYNSYTWAVLAYTWH\nHLA-B18:98,YHATYRNISTNTYESNLYLRYDSYTWAVLAYTW
H\nHLA-B18:99,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B2701,YHTEYREICAKTYENTAYLNYHDYTWAVLAYEWY\nHLA-B2702,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B2703,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B2704,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B2705,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B2706,YHTEYREICAKTDESTLYLNYDYYTWAELAYEWY\nHLA-B2707,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B2708,YHTEYREICAKTDESNLYLNYHDYTWAVLAYEWY\nHLA-B2709,YHTEYREICAKTDEDTLYLNYHHYTWAVLAYEWY\nHLA-B2710,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B2711,YHTEYREICAKTDESTLYLSYNYYTWAVLAYEWY\nHLA-B2712,YHTEYREICTNTDESNLYLNYHDYTWAVLAYEWY\nHLA-B2713,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B2714,YHTEYREICAKTDEDTLYWTYHDYTWAVLAYEWY\nHLA-B2715,YHTEYREICAKTDESTLYLNYHDYTWAELAYTWY\nHLA-B2716,YHTEYREICTNTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B2717,YHTEFREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B2718,YHTEYREISTNTYESNLYLNYHDYTWAELAYEWY\nHLA-B2719,YHTEYREICAKTDEDTLYIRYHDYTWAVLAYEWY\nHLA-B2720,YHTEYREICAKTDESTLYLNYNYYTWAELAYEWY\nHLA-B2721,YHTEYREICAKTDESTLYLRYDYYTWAELAYEWY\nHLA-B2723,YHTEYRNIFTNTYESTLYLNYHDYTWAVLAYEWY\nHLA-B2724,YHTEYREICAKTDESTLYLSYNYYSWAELAYEWY\nHLA-B2725,YHTEYREICAKTDESTLYLNYHDYTWAEWAYLWY\nHLA-B2726,YHTEYREICAQTDESNLYLNYHDYTWAVLAYEWY\nHLA-B2727,YHTEYREICAKTDEDTLYLNYNYYTWAVLAYEWY\nHLA-B2728,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYTWH\nHLA-B2729,YHTEYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B2730,YHTEYREICAKTDENIAYIRYHDYTWAVLAYEWY\nHLA-B2731,YHTEYREICAQTDESTLYLNYHDYTWAVLAYEWY\nHLA-B2732,YHTEYREICAKTDEDTLYLSYHDYTWAVLAYEWY\nHLA-B2733,YHTEYREICAKTDESNLYLSYNYYTWAVLAYEWY\nHLA-B2734,YHTEYREICAKTDEDTLYLSYDYYTWAVLAYEWY\nHLA-B2735,YHTEYREICAKTDEDTLYLNYNFYTWAVLAYEWY\nHLA-B2736,YHTEYREICAKTDESTLYLNYHDYSLAVLAYEWY\nHLA-B2737,YYTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B2738,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYLWY\nHLA-B27:01,YHTEYREICAKTYENTAYLNYHDYTWAVLAYEWY\nHLA-B27:02,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:03,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:04,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:05,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:06,YHTEYREICAKTDESTLYLNYDYYTWAELAYEWY\nHLA-B27:07,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B27:08,YHTEYREICAKTDESNLYLNYHDYTWAVLAYEWY\nHLA-B27:09,YHTEYREICAKTDEDTLYLNYHHYTWAVLAYEWY\nHLA-B27:10,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B27:100,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:101,YHTEYREICAQTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:102,YHTEYREICAKTDENIAYLNYNYYTWAVLAYEWY\nHLA-B27:103,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:104,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:105,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:106,YHTEYREICAKTDESTLYLNYDYYTWAELAYEWY\nHLA-B27:107,YHTEYREICAKTDESTLYLNYDSYTWAELAYEWY\nHLA-B27:108,YHTEYREICAKTDESTLYLNYHDYTWAERAYEWY\nHLA-B27:109,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:11,YHTEYREICAKTDESTLYLSYNYYTWAVLAYEWY\nHLA-B27:110,YHTEYREICAKTDENTLYLNYHDYTWAVLAYEWY\nHLA-B27:111,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:112,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:113,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:114,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:115,YHTEYREICAKTDESTLYLNYHDYTWAEWAYEWY\nHLA-B27:116,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:117,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:118,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:119,YHTEYREICTNTYENIAYLNYHDYTWAVLAYEWY\nHLA-B27:12,YHTEYREICTNTDESNLYLNYHDYTWAVLAYEWY\nHLA-B27:120,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:121,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:122,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:123,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:124,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:125,YHTEYREICAKTDESTLYLRYNYYSLA
VLAYEWY\nHLA-B27:126,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:127,YHTEYREICAKTDEDTLYIRYDDYTWAVDAYLSY\nHLA-B27:128,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:129,YHTEYREICANTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:13,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:130,YHTEYREICAKTDEDTLYWTYNYYTWAVLAYEWY\nHLA-B27:131,YHTEYREICAKTYESTLYLNYHDYTWAVLAYEWY\nHLA-B27:132,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:133,YHTEYREICAKTDEDTLYLNYHDYTWAGLAYEWY\nHLA-B27:134,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:135,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:136,YHTEYREICAKTDEDTLYLNYDYYTWAVLAYEWY\nHLA-B27:137,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:138,YHTEYREICAKTDESTLYLSYDYYTWAELAYEWY\nHLA-B27:139,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYLWY\nHLA-B27:14,YHTEYREICAKTDEDTLYWTYHDYTWAVLAYEWY\nHLA-B27:140,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:141,YHTEYREICAKTDEDTLYLNSHDYTWAVLAYEWY\nHLA-B27:142,YHTEYREICAKTDENTAYLNYHDYTWAVLAYEWY\nHLA-B27:143,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:144,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:145,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:146,NHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:147,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:148,YHTEYPEICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:149,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:15,YHTEYREICAKTDESTLYLNYHDYTWAELAYTWY\nHLA-B27:150,YHTEYREICAKTDEDTLYINYHDYTWAVLAYEWY\nHLA-B27:151,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:152,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:153,YHTEYREICTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B27:154,YHTEYREICAKTDESTLYLNYDYYTWAVLAYEWY\nHLA-B27:155,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:156,YHTEYREICAKTDEDTLCLNYHDYTWAVLAYEWY\nHLA-B27:157,YHTEYRNIYAQTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:158,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:159,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:16,YHTEYREICTNTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:160,YHTEYREICAKTDEDTLYLNYHDYTWAVLTYEWY\nHLA-B27:161,YHTEYREICAKTDEDTLYLNYHDYTWAVWAYEWY\nHLA-B27:162,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:163,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:164,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B27:165,YHTEYREICAKTDESNLYLNYHDYTWAELAYEWY\nHLA-B27:166,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:167,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:168,YHTEYREICAKTDESTLYLNYNYYTWAELAYEWY\nHLA-B27:169,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:17,YHTEFREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:170,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:171,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:172,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:173,YHTEYREICAKTDESTLYLNYHDYSWAELAYEWY\nHLA-B27:174,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:175,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:177,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:178,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:179,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:18,YHTEYREISTNTYESNLYLNYHDYTWAELAYEWY\nHLA-B27:180,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B27:181,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:182,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:183,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:184,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:185,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:186,YHTEYREICAKTDEDTLYLNYQDYTWAVLAYEWY\nHLA-B27:187,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B27:188,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:19,YHTEYREICAKTDEDTLYIRYHDYTWAVLAYEWY\nHLA-B27:20,YHTEYREICAKTDESTLYLNYNYYTWAELAYEWY\nHLA-B27:21,YHTEYREICAKTDESTLYLRYDYYTWAELAYEWY\nHLA-B27:23,YHTEYRNIFTNTYESTLYLNYHDYTWAVLAYEWY\nHLA-B27:24,YHTEYREICAKTDESTLYLSYNYYSWAELAYEWY\nHLA-B27:25,YHTEYREICAKTDESTLYLNYHDYTWAEWAYLWY\nHLA-B2
7:26,YHTEYREICAQTDESNLYLNYHDYTWAVLAYEWY\nHLA-B27:27,YHTEYREICAKTDEDTLYLNYNYYTWAVLAYEWY\nHLA-B27:28,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYTWH\nHLA-B27:29,YHTEYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:30,YHTEYREICAKTDENIAYIRYHDYTWAVLAYEWY\nHLA-B27:31,YHTEYREICAQTDESTLYLNYHDYTWAVLAYEWY\nHLA-B27:32,YHTEYREICAKTDEDTLYLSYHDYTWAVLAYEWY\nHLA-B27:33,YHTEYREICAKTDESNLYLSYNYYTWAVLAYEWY\nHLA-B27:34,YHTEYREICAKTDEDTLYLSYDYYTWAVLAYEWY\nHLA-B27:35,YHTEYREICAKTDEDTLYLNYNFYTWAVLAYEWY\nHLA-B27:36,YHTEYREICAKTDESTLYLNYHDYSLAVLAYEWY\nHLA-B27:37,YYTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:38,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYLWY\nHLA-B27:39,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:40,YHTEYREICAKTDESNLYLNYHDYTWAELAYEWY\nHLA-B27:41,YHTEYREICAKTDEDTLYLNYDSYTWAVLAYEWY\nHLA-B27:42,YHTEYREICAKTDEDNLYLNYHDYTWAVLAYEWY\nHLA-B27:43,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B27:44,YHTEYREICAKTYESNLYLNYHDYTWAVLAYEWY\nHLA-B27:45,YHTEYREICAKTDEDTLYLNYHDYTWAVRAYEWY\nHLA-B27:46,YHTEYREICAKTDEDTLYLNYHYYTWAVLAYEWY\nHLA-B27:47,YHTEYREICAKTDEDTLYLNYHDYTWAVDAYLSY\nHLA-B27:48,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:49,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:50,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYESY\nHLA-B27:51,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:52,YHTTYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:53,YHTEYREICAKTDEDIAYLNYHDYTWAVLAYEWY\nHLA-B27:54,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B27:55,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:56,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:57,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:58,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:60,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:61,YHTEYREICAKTDESTLYLNYHDYTWAVLAYEWY\nHLA-B27:62,YHTEYREICAKTDENIAYLNYHDYTWAVLAYTWH\nHLA-B27:63,YHTEYREICAKTDESTLYLNYHDYTWAELAYLWY\nHLA-B27:67,YHTMYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:68,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:69,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:70,YHTEYREICAKTDEDTLYLRYHDYTWAVLAYEWY\nHLA-B27:71,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYTWY\nHLA-B27:72,YHTEYREICAKTDEDTLYLNNHDYTWAVLAYEWY\nHLA-B27:73,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:74,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:75,YHSEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:76,YHTEYREICAKTDEDTLYLNYNYYTWAVLAYLWY\nHLA-B27:77,YHTEYREICANTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:78,YHTEYREICAKTDEDTLYLNYHSYTWAVLAYEWY\nHLA-B27:79,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:80,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:81,YHTEYREICAKTDEDTLYWTYHDYTWAVLAYEWY\nHLA-B27:82,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:83,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:84,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:85,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:86,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B27:87,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:88,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:89,YHTEYREICAKTDENNLYLNYHDYTWAVLAYEWY\nHLA-B27:90,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:91,YHTEYREICAKTDESTLYLNYDYYTWAELAYEWY\nHLA-B27:92,YHTEYRNICTNTYEDTLYLNYHDYTWAELAYEWY\nHLA-B27:93,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:95,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B27:96,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:97,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:98,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B27:99,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B3501,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3502,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B3503,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B3504,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B3505,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B3506,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B3507,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYL
WY\nHLA-B3508,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B3509,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B3510,YYATYREIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3511,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B3512,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B3513,YYATYREIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B3514,YYATYRNIFTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B3515,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYEWY\nHLA-B3516,YYATYREIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B3517,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B3518,YYATYRNIFTNTYESNLYIRYNYYTWAVRAYLWY\nHLA-B3519,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3520,YYATYRNISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3521,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWH\nHLA-B3522,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYLWY\nHLA-B3523,YYATYRNIFTNTYESNLYIRFDSYTWAVLAYLWY\nHLA-B3524,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWH\nHLA-B3525,YYSEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3526,YYAEYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3527,YYATYRNIFTNTYENNLYIRYDSYTWAVLAYLWY\nHLA-B3528,YYATYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3529,YYATYRNIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B3530,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B3531,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B3532,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B3533,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYEWY\nHLA-B3534,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B3535,YYATYRNIFTNTYESNLYIRYDSYTWAVLTYTWY\nHLA-B3536,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B3537,YYATYRNIFTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B3538,YYATYRNIFTNTYESNLYIRYDFYTWAVDAYLWY\nHLA-B3539,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B3540,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3541,YYATYRNIFTNTYESNLYIRYDSCTWAVLAYLWY\nHLA-B3542,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3543,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B3544,YYATYRNIFTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B3545,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLSY\nHLA-B3546,YYAMYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3547,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3548,YYATYRNIFTNTYESNLYIRSDSYTWAVLAYLWY\nHLA-B3549,YYAEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3550,YHATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3551,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B3552,YYATYRNIFTNTYECNLYIRYDSYTWAVLAYLWY\nHLA-B3554,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3555,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B3556,YYATYRNIFTNTYENNLYIRYDFYTWAVLAYLWY\nHLA-B3557,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3558,YYATYRNIFTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B3559,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B3560,YYATYRNIFTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B3561,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B3562,YYATYRNIFTNTYESNLYIRYDSYTWAVWAYLWY\nHLA-B3563,YHTKYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3564,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B3565,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B3566,YYATYRNIFTNTYESNLYLSYDSYTWAVRAYEWY\nHLA-B3567,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B3568,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B3569,YYATYREIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B3570,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B3571,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLSY\nHLA-B3572,YYATYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B3573,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B3574,YYATYRNIFTNTYVSNLYIRYDFYTWAVLAYLWY\nHLA-B3575,YYATYRNIFTNTYESNLYLRYDFYTWAVLAYLWY\nHLA-B3576,YYATYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B3577,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:01,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:02,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:03,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:04,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:05,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:06,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B35:07,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:08,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nH
LA-B35:09,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:10,YYATYREIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:100,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:101,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:102,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B35:103,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:104,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:105,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:106,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:107,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:108,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:109,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWH\nHLA-B35:11,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B35:110,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:111,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:112,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:113,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B35:114,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:115,YYATYRNIFTNTYESNLYIRYDSYTWAVDAYLWY\nHLA-B35:116,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:117,YYATYRNIFTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B35:118,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B35:119,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:12,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:120,YYATYRNIFTNTYESNLYIRHDSYTWAVLAYLWY\nHLA-B35:121,YYATYRNIFTNTYESNLYIRYHSYTWAVLAYLWY\nHLA-B35:122,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:123,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:124,YYSTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:125,YYSTYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B35:126,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:127,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:128,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:13,YYATYREIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:131,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:132,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:133,YYATYRNIFTNTYESNLYIRYVSYTWAVLAYLWY\nHLA-B35:135,YYATYRNICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B35:136,YYATYRNIFTNTDESNLYIRYDFYTWAVLAYLWY\nHLA-B35:137,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:138,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:139,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:14,YYATYRNIFTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B35:140,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:141,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:142,YYSTYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:143,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:144,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:146,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:147,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:148,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:149,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:15,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYEWY\nHLA-B35:150,YYTTYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:151,YYATYRNIFTNTYESNLYLRYDFYTWAVLAYLWY\nHLA-B35:152,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:153,YYATYRNIFTNTCESNLYIRYDFYTWAVLAYLWY\nHLA-B35:154,YYAKYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:155,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:156,YYATYRNIFTNTYESNLYIRYDFYTWAVRAYLWY\nHLA-B35:157,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYTWH\nHLA-B35:158,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:159,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:16,YYATYREIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B35:160,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:161,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:162,YHSTYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:163,YYATYRNIFTNTYESNLYIRYDFYTWAVWAYLWY\nHLA-B35:164,YYATYRNIFTNTYESNLYISYNSYTWAVRAYLWY\nHLA-B35:166,YYATYRNIFTNTYESNLYIRYDSYTWAVVAYLWY\nHLA-B35:167,YYATYRNIFTNTYESNLYIRYDFYTWAELAYLWY\nHLA-B35:168,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:169,YYATYRNIFTNTYESNLYIRYDFYTWAVDAYLSY\nHLA-B35:17,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B35:170,Y
YATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:171,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:172,YYATYRNIFTNTYESNLYIRYNYYTWAVRAYLWY\nHLA-B35:174,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:175,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:176,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:177,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:178,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:179,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:18,YYATYRNIFTNTYESNLYIRYNYYTWAVRAYLWY\nHLA-B35:180,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:181,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:182,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:183,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:184,YYTTYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:185,YYATYRNIFTNTYENNLYLRYDSYTWAEWAYLWY\nHLA-B35:186,YYATYREKYRQTDVSNLYIRYDSYTWAVLAYLWY\nHLA-B35:187,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:188,YYATYRNIFTNTYESNLYIRYDSYTWAELAYTWH\nHLA-B35:189,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:19,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:190,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYTWH\nHLA-B35:191,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:192,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:193,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:194,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:195,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:196,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:197,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:198,YYAEYRNIFTNTDESNLYIRYDFYTWAVLAYLWY\nHLA-B35:199,YYATYRNIFTNTYESNLYLSYDYYTWAVLAYLWY\nHLA-B35:20,YYATYRNISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:200,YYATYRNIFTDTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:201,YYTTYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:202,YYATYQNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:203,YYATYRNIFTNTYEDNLYIRYDSYTWAVLAYLWY\nHLA-B35:204,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:205,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:206,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:207,YYAMYREIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:208,YYATYRNIFTNTYESNLYIVYDSYTWAVLAYLWY\nHLA-B35:209,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:21,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWH\nHLA-B35:210,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:211,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:212,YYATYRNIVTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:213,YYATYRNIFTNTYESNLYIRYDFYTWAEWAYTWY\nHLA-B35:214,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:215,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:217,YHTKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:218,YDATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:219,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:22,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYLWY\nHLA-B35:220,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:221,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:222,YYATYRNIFTNTYESKLYIRYDSYTWAVLAYLWY\nHLA-B35:223,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:224,YYATYRNIFTNSYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:225,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:226,YYATYGEISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:227,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:228,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:229,YYATYRNIFTQTDESNLYIRYDSYTWAVRAYLWY\nHLA-B35:23,YYATYRNIFTNTYESNLYIRFDSYTWAVLAYLWY\nHLA-B35:230,YYATYRNIFTNTYESNLYIRYNYYSLAVLAYEWY\nHLA-B35:231,YHATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:232,YYATYRNIFTNTYESNLYLSYDSYTWAERAYLWY\nHLA-B35:233,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWH\nHLA-B35:234,YYATYRNIFTNTYESNLYLRYHDYTWAVLAYLWY\nHLA-B35:235,YYATYRNIFTNTYESNLYWTYDFYTWAVLAYLWY\nHLA-B35:236,YYATYRNIFTNTYESNLYIRYDFYTWAVLTYLWY\nHLA-B35:237,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:238,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:239,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:24,YYATYRNIFTN
TYESNLYIRYDSYTWAVLAYLWH\nHLA-B35:240,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:241,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:242,YYATYRNIFTNTYESNLYISYDFYTWAVLAYLWY\nHLA-B35:243,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:244,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:245,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:246,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:247,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:248,YYATYRNIFTNTYESNQYIRYDSYTWAVLAYLWY\nHLA-B35:249,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:25,YYSEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:250,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:251,YYATYRNIFTNTYESNLYLRYNYYTWAVLAYLWY\nHLA-B35:252,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:253,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:254,YYAEYRNIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B35:255,YYATYRNIFTNTYESNLYIRYDSCTWAVLAYLWY\nHLA-B35:256,YDATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:257,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:258,YYATYRNIFTNTYESNLYIRFNYYTWAVLAYLWY\nHLA-B35:259,YYATYRNIFTNTYESNLYIRYDSYTWAVLTYLWY\nHLA-B35:26,YYAEYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:260,YYATYRNIFTNTYESNLYIRYDSYTWAVDAYLWY\nHLA-B35:261,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B35:262,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYTWY\nHLA-B35:263,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:264,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:265,YYATYRNIFTNTYESNLYWTYDSYTWAEWAYLWY\nHLA-B35:266,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:267,YYSTYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:268,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:269,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:27,YYATYRNIFTNTYENNLYIRYDSYTWAVLAYLWY\nHLA-B35:270,YYATYREIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:271,YYAMYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:272,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:273,YYATYRNIFTNTYESNLYWTYNYYTWAVLAYLWY\nHLA-B35:274,YYAKYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:275,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:276,YYATYRNIFTNTYESNLYWTYNFYTWAVLTYTWY\nHLA-B35:277,YYATYRNIFTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B35:278,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:279,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:28,YYATYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:280,YHATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:281,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:282,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:283,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:284,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:285,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:286,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:287,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWH\nHLA-B35:288,YYATYRNIFTNTYESNLYMRYDSYTWAVLAYLWY\nHLA-B35:289,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:29,YYATYRNIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B35:290,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:291,YYATYRNIFTNTYESNLYIRYDLYTWAVLAYLWY\nHLA-B35:292,YYATYRNIFTNTYESNLYLRYNYYTWAVLAYLWY\nHLA-B35:293,YYATYRNIFTNTYESNMFIRYDSYTWAVLAYLWY\nHLA-B35:294,YYATYRNICTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B35:295,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:296,YYATYRNISTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:297,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:298,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:299,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:30,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B35:300,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:301,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:302,YYATYRNIFTNTYESNLHIRYDSYTWAVLAYLWY\nHLA-B35:303,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:304,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:305,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:306,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:307,YYATYRNIFTNTYESNLYIR
YDSYTWAVLAYLWY\nHLA-B35:308,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:309,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLSY\nHLA-B35:31,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B35:310,YYATYRNIFTNTYEGNLYIRYDSYTWAVLAYLWY\nHLA-B35:311,YYAMYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:312,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:313,YYATYRNIFTNTYKSNLYIRYDSYTWAVLAYLWY\nHLA-B35:314,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:315,YYATYRNIFTNTYESNLNIRYDSYTWAVLAYLWY\nHLA-B35:316,YYATYRNIFTNTDESNLYIRYNYYTWAVLAYLWY\nHLA-B35:317,YHTKYREISTNTYESNLYIVYDSYTWAVLAYLWY\nHLA-B35:318,YYATYREIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B35:319,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:32,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B35:320,YYSTYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:321,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:322,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:323,YHATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:324,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:325,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:326,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:327,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:328,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:329,YYATYRNIFTNTYESTLYIRYDFYTWAVLAYLWY\nHLA-B35:33,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYEWY\nHLA-B35:330,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:331,YYATYRNIFTNTYESNLYIRYDSYSWAVLAYLWY\nHLA-B35:332,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:334,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:335,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:336,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:337,YYATYRNIFTNTYESNLYIRYDSYTWAERAYLWY\nHLA-B35:338,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:339,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:34,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B35:340,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:341,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYEWY\nHLA-B35:342,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:343,YYATYRNIFTNTYESNLYIRYDSYTWAEWAYTWY\nHLA-B35:344,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:345,YYATYRNIFTNTYESNLYIRYDPYTWAVLAYLWY\nHLA-B35:346,YYATYRNIFTNTYESNLYISYDSYTWAVRAYLWY\nHLA-B35:347,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:348,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:349,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:35,YYATYRNIFTNTYESNLYIRYDSYTWAVLTYTWY\nHLA-B35:350,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYTWY\nHLA-B35:351,YYATYRNIFTNTYQSNLYIRYDSYTWAVLAYLWY\nHLA-B35:352,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:353,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:354,YYATYRNIFTNTYESNLYIRYDFYTWAVLSYLWY\nHLA-B35:355,YYATYRNIFTNTYEDNLYIRYDFYTWAVLAYLWY\nHLA-B35:356,YYATYRNIFTNTYVSNLYIRYDSYTWAVRAYLWY\nHLA-B35:357,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:358,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:359,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:36,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:360,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:361,YYAEYREISTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:362,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:363,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:364,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:365,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:366,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:367,YYATYRNIFTNTYESNLYLRYNYYTWAVLAYLWY\nHLA-B35:368,YYATYRNIFAQTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:369,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:37,YYATYRNIFTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B35:370,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:371,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:372,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:373,YYATYPNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:374,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:375,YYATYRNIFTNTYESNLYIRYDFYTWAVLA
YLWY\nHLA-B35:376,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:377,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:378,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:379,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:38,YYATYRNIFTNTYESNLYIRYDFYTWAVDAYLWY\nHLA-B35:380,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:382,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:383,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:384,YYATYRNIFTNTYESNLYIRYNYYTWAELAYLWY\nHLA-B35:385,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:386,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:387,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:388,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:389,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:39,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B35:391,YYATYRNIFTNTYESNLHIRYNYYTWAVLAYLWY\nHLA-B35:392,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:393,HYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:394,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B35:395,YYATYRNIFTNTYESNLYIRFDFYTWAVLAYLWY\nHLA-B35:396,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:397,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:398,YYATYRNIFTNTYESNLYIRYDFHTWAVLAYLWY\nHLA-B35:399,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:400,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:401,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:402,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:403,YYATYRNIFTNTCESNLYIRYNYYTWAVLAYLWY\nHLA-B35:404,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:405,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:406,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:407,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B35:408,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:409,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:41,YYATYRNIFTNTYESNLYIRYDSCTWAVLAYLWY\nHLA-B35:410,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:411,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:412,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:413,YYATYRNIFTNTYESNLYIRYNFYTWAEWAYLWY\nHLA-B35:42,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:43,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B35:44,YYATYRNIFTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B35:45,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLSY\nHLA-B35:46,YYAMYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:47,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:48,YYATYRNIFTNTYESNLYIRSDSYTWAVLAYLWY\nHLA-B35:49,YYAEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:50,YHATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:51,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:52,YYATYRNIFTNTYECNLYIRYDSYTWAVLAYLWY\nHLA-B35:54,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:55,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:56,YYATYRNIFTNTYENNLYIRYDFYTWAVLAYLWY\nHLA-B35:57,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:58,YYATYRNIFTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B35:59,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B35:60,YYATYRNIFTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B35:61,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B35:62,YYATYRNIFTNTYESNLYIRYDSYTWAVWAYLWY\nHLA-B35:63,YHTKYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:64,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:66,YYATYRNIFTNTYESNLYLSYDSYTWAVRAYEWY\nHLA-B35:67,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B35:68,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B35:69,YYATYREIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B35:70,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:71,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLSY\nHLA-B35:72,YYATYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:74,YYATYRNIFTNTYVSNLYIRYDFYTWAVLAYLWY\nHLA-B35:75,YYATYRNIFTNTYESNLYLRYDFYTWAVLAYLWY\nHLA-B35:76,YYATYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B35:77,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:78,YYATYRNIFANTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:79,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B35:80,YYATYREIFTNTYESNLYIRYDSY
TWAVRAYLWY\nHLA-B35:81,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWH\nHLA-B35:82,YYATYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:83,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:84,YHTTYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:85,YYATYRNICTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:86,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B35:87,YYATYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B35:88,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B35:89,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:90,YYTTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:91,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:92,YYATYRNIFTNAYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:93,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYTWY\nHLA-B35:94,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B35:95,YYATYRNISTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B35:96,YYATYRNIFTNTYESNLYIRYDFYTWAELAYTWH\nHLA-B35:97,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B35:98,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B35:99,YYATYRNIFTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B3701,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B3702,YHSTYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B3704,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWH\nHLA-B3705,YHSTYREISTNTYEDNLYIRSNFYTWAVDAYTWY\nHLA-B3706,YHSKYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B3707,YHSTYREISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B3708,YHSTYRNISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B3709,YHSTYREISTNTYEDTLYLSYDYYTWAVDAYTWY\nHLA-B3710,YHSTYREISTNTYENTAYIRSNFYTWAVDAYTWY\nHLA-B3711,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B3712,YHSTYREISTNTYEDTLYIRYNYYTWAVDAYTWY\nHLA-B3713,YHSTYREISTNTYEDTLYIRSNFYTWAEDAYTWY\nHLA-B37:01,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:02,YHSTYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B37:04,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWH\nHLA-B37:05,YHSTYREISTNTYEDNLYIRSNFYTWAVDAYTWY\nHLA-B37:06,YHSKYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:07,YHSTYREISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B37:08,YHSTYRNISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:09,YHSTYREISTNTYEDTLYLSYDYYTWAVDAYTWY\nHLA-B37:10,YHSTYREISTNTYENTAYIRSNFYTWAVDAYTWY\nHLA-B37:11,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B37:12,YHSTYREISTNTYEDTLYIRYNYYTWAVDAYTWY\nHLA-B37:13,YHSTYREISTNTYEDTLYIRSNFYTWAEDAYTWY\nHLA-B37:14,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B37:15,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:17,YHSTYREISTNTYEDTLYIRSNFYTWTVDAYTWY\nHLA-B37:18,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:19,YHSTYREISTNTYEDTLYIRYNFYTWAVDAYTWY\nHLA-B37:20,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:21,YHSTYREIFTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:22,YHATYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:23,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:24,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:25,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:26,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:27,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:28,YHSEYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:29,YHSTYREISTNTYESTLYIRSNFYTWAVDAYTWY\nHLA-B37:31,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:32,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:34,YHSTYREISTNTYENIAYIRSNFYTWAVDAYTWY\nHLA-B37:35,YHSTYREISTNTYEDTLYIRSDSYTWAVDAYTWY\nHLA-B37:36,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:37,YHSTYREISTNTDESNLYIRSNFYTWAVDAYTWY\nHLA-B37:38,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:39,YHSTYREISTNTYEDTLYIRSNFYTWAVRAYLWY\nHLA-B37:40,YHSTYREISTNTYEDTLYIRSNFYTWAVLTYTWY\nHLA-B37:41,YHLTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:43,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:44,YHSTYREISTNTYEDTLYLRYNFYTWAVDAYTWY\nHLA-B37:45,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:46,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:47,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:48,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYLWY\nHLA-B37:49,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:50,YHSTYREISTNTYEDTLYI
RSNFYTWAVDAYTWY\nHLA-B37:51,YHSTYREISTNTYEDTLYLRSNFYTWAVDAYTWY\nHLA-B37:52,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:53,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:54,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:55,YYSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:56,YHSTYREISTNTYEDTLYIRYDSYTWAVDAYTWY\nHLA-B37:57,YHSTYREICTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:58,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:59,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:60,YHSTYREICAKTDEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:61,YHSMYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:62,YDSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:63,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:64,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:65,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:66,YHSTYREISTNTYEDTLYIRSNFYTWAVWAYTWY\nHLA-B37:67,YHSTYRNIFTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B37:68,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:69,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:70,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:71,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:72,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:73,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:74,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:75,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:76,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:77,YHSTYREISTNTYEDTLYIRSDFYTWAVDAYTWY\nHLA-B37:78,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B37:80,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B3801,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3802,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B3803,YYSEYREISTNTDESTAYLRYNFYTWAVLTYTWY\nHLA-B3804,YYSEYREICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B3805,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3806,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3807,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3808,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYMWY\nHLA-B3809,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3810,YYSEYRNICTNTYENIAYLRYNFYTWAELAYTWY\nHLA-B3811,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3812,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3813,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B3814,YYSEYRNICTNTDENIAYLRYNFYTWAVLTYTWY\nHLA-B3815,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B3816,YYTEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:01,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:02,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:03,YYSEYREISTNTDESTAYLRYNFYTWAVLTYTWY\nHLA-B38:04,YYSEYREICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:05,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:06,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:07,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:08,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYMWY\nHLA-B38:09,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:10,YYSEYRNICTNTYENIAYLRYNFYTWAELAYTWY\nHLA-B38:11,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:12,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:13,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:14,YYSEYRNICTNTDENIAYLRYNFYTWAVLTYTWY\nHLA-B38:15,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:16,YYTEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:17,YYSEYRNICTNTYEDTLYLRYNFYTWAVLTYTWY\nHLA-B38:18,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:19,YYSEYRNICTNTYENIAYLSYNFYTWAVLTYTWY\nHLA-B38:20,YYSEYRNICTNTYENIAYIRYNFYTWAVLTYTWY\nHLA-B38:21,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:22,YYSEYRNICTNTYENIAYLNYNFYTWAVLTYKWY\nHLA-B38:23,YYSEYRNICTNTYENTAYFRYNFYTWAVLTYTWY\nHLA-B38:24,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:25,YYSEYREICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:26,YYSEYRNIYAQTDENIAYLRYNFYTWAVLTYTWY\nHLA-B38:27,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:28,YYAEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:29,YYSEYRNICTNTYENTAYLRYDFYTWAVLTYTWY\nHLA-B38:30,YYSEYRNICTNTYENIAYLRYNFYTWAVDAYLWY\nHLA-B38:31,YYSEYRNICTNTYENIAY
LRYNFYTWAVLTYTWY\nHLA-B38:32,YYSEYRNICTNTYENIAYLRYNFYTWAELTYTWY\nHLA-B38:33,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:35,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:36,YYSEYRNICTNTYENIAYLRYNFYTWAVLAYTWY\nHLA-B38:37,YYSEYRNICTNTYENIAYLRYNFYTWAVLTHTWY\nHLA-B38:38,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:39,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:40,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:41,YYSEYRNICTNTDESIAYLRYNFYTWAVLTYTWY\nHLA-B38:42,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:43,YYSEYRNICTNTYENTAYLRYNFYTLAVLTYTWY\nHLA-B38:44,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:45,YYSEYRNICTNTYENTAYLRYNFYTWAVLIYTWY\nHLA-B38:46,YYTEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:47,YYSEYRNICTNTYESTAYLRYNFYTWAVLTYTWY\nHLA-B38:48,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:49,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:50,YYSEYRNICTNTYENTAYLRYNFYTWAVLAYTWY\nHLA-B38:51,YYSEYWNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:52,YYSEYRNICTNTYENIAYLRYDSYTWAVLAYTWY\nHLA-B38:53,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:54,YYSEYRNICTDTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:56,YYSEYRKICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:57,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:58,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:59,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:60,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:61,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:62,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:63,YYSEYRNICTNTYENIAYLRYNFYSLAVLAYEWY\nHLA-B38:64,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:65,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYLWY\nHLA-B38:66,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:67,YYSEYRNICTNTYESIAYLRYNFYTWAVLTYTWY\nHLA-B38:69,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:70,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:71,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:72,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:73,YYSEYRNICTNTYENIAYLRYNYYTWAVLTYTWY\nHLA-B38:74,YYSEYRNIFTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:75,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:76,YYSEYRNICTNTYENTACLRYNFYTWAVLTYTWY\nHLA-B38:77,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:78,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:79,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B38:81,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B38:82,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B3901,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3902,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3903,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B3904,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3905,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B3906,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B3908,YYSEYREISTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B3909,YYSEYRNICTNTDESNLYLRSNFYTWAVLTYTWY\nHLA-B3910,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3912,YDSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3913,YYSEYREISTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B3914,YYSEYRNICTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B3915,YYSEYRNICTNTDESNLYLRYDFYTWAVLTYTWY\nHLA-B3916,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3917,YYSEYRNIYTNTDESNLYLRYNLYTWAVLTYTWY\nHLA-B3918,YYSEYRNICTNTDESNLYLRYNFYTWAEWTYTWY\nHLA-B3919,YYSTYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3920,YYSEYRNIYTNTYENNLYLRYNFYTWAVLTYTWY\nHLA-B3922,YYSEYREICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3923,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3924,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B3926,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3927,YYSEYRNICTNTDVSNLYLRYNFYTWAVLTYTWY\nHLA-B3928,YYSEYRNICTNTDESNLYLTYNFYTWAVLTYTWY\nHLA-B3929,YYSEYRNICTNTDESNLYLSYDYYTWAVLTYTWY\nHLA-B3930,YYSEYRNICTNTDESNLYLRYNFYTWAVLAYTWY\nHLA-B3931,YHSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3932,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYLWH\nHLA-B3933,YYSEYRNICTNTDESNLYWTYNFYTWAVRA
YLWY\nHLA-B3934,YYSEYRNICTNTDESNLYWTYNFYTWAVLAYTWY\nHLA-B3935,YYSEYRNICTNTDESNLYLRYNFYTWAELTYTWY\nHLA-B3936,YYSEYRNICTNTDESNLYLRYNFYTWAEWAYTWY\nHLA-B3937,YYSEYRNICTNTYESNLYLSYNFYTWAVLTYTWY\nHLA-B3938,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3939,YYSEYRNISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3941,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B3942,YYSEYRNICTNTDESNLYIRYNFYTWAVLTYTWY\nHLA-B39:01,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:02,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:03,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B39:04,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:05,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B39:06,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:07,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B39:08,YYSEYREISTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B39:09,YYSEYRNICTNTDESNLYLRSNFYTWAVLTYTWY\nHLA-B39:10,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:100,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:101,YHSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:102,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYMWY\nHLA-B39:103,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:104,YYSEYRNICTNTDESILYLRYNFYTWAVLTYTWY\nHLA-B39:105,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:106,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:107,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWY\nHLA-B39:108,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:109,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:11,YYSEYRNICTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B39:110,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:111,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:112,YYSEYRNICTNTDESNLYLRSNFYTWAVLAYTWY\nHLA-B39:113,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B39:114,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:115,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:117,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:118,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B39:119,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:12,YDSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:120,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B39:121,YYSGYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:122,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:123,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:124,YYSEYRNICTNTDENNLYLRYNFYTWAVLTYTWY\nHLA-B39:125,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:126,YYSEYRNIYTNTDESNLYLRYNYYTWAVLTYTWY\nHLA-B39:127,YYSEYRNICTNTDESNLYWTSNFYTWAVLTYTWY\nHLA-B39:128,YHSEYRNICTNTDESNLYLRYNFYTWAVLAYTWY\nHLA-B39:129,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:13,YYSEYREISTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B39:130,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:131,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:132,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:134,YYSEYRNICTNTDESNLYLRYNSYTWAVLTYTWY\nHLA-B39:135,YYAEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:136,YYSEYRNICTNTDESNLYLRYNFYTLAALAYTWY\nHLA-B39:137,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:138,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:14,YYSEYRNICTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B39:140,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:141,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:143,YYSEYRNIYTNTDVSNLYLRYNFYTWAVLTYTWY\nHLA-B39:15,YYSEYRNICTNTDESNLYLRYDFYTWAVLTYTWY\nHLA-B39:16,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:17,YYSEYRNIYTNTDESNLYLRYNLYTWAVLTYTWY\nHLA-B39:18,YYSEYRNICTNTDESNLYLRYNFYTWAEWTYTWY\nHLA-B39:19,YYSTYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:20,YYSEYRNIYTNTYENNLYLRYNFYTWAVLTYTWY\nHLA-B39:22,YYSEYREICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:23,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:24,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B39:26,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:27,YYSEYRNICTNTDVSNLYLRYNFYTWAVLTYTWY\nHLA-B39:28,YYSEYRNICTNTDESNLYLTYNFYTWAVLTYTWY\nHLA-B39:29,YYSEYRNICTNTDESNLYLSYDYYTWAV
LTYTWY\nHLA-B39:30,YYSEYRNICTNTDESNLYLRYNFYTWAVLAYTWY\nHLA-B39:31,YHSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:32,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYLWH\nHLA-B39:33,YYSEYRNICTNTDESNLYWTYNFYTWAVRAYLWY\nHLA-B39:34,YYSEYRNICTNTDESNLYWTYNFYTWAVLAYTWY\nHLA-B39:35,YYSEYRNICTNTDESNLYLRYNFYTWAELTYTWY\nHLA-B39:36,YYSEYRNICTNTDESNLYLRYNFYTWAEWAYTWY\nHLA-B39:37,YYSEYRNICTNTYESNLYLSYNFYTWAVLTYTWY\nHLA-B39:39,YYSEYRNISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:41,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:42,YYSEYRNICTNTDESNLYIRYNFYTWAVLTYTWY\nHLA-B39:43,YYSEYRNICTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B39:44,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:45,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:46,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:47,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYEWY\nHLA-B39:48,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWH\nHLA-B39:49,YYSEYREISTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B39:50,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYEWY\nHLA-B39:51,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:52,YYSEYRNICTDTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:53,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:54,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:55,YYSEYRNICTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B39:56,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B39:57,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:58,YYSEYRNIFTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:59,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:60,YYSEYRNICTNTDESNLYLRYNFYTWAALTYTWY\nHLA-B39:61,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:62,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:63,YYAEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:64,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:65,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:66,YYSEYPNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:67,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:68,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:69,YYSKYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:70,YYSEYRNICTNTDESNLYLRYNYYTWAVLTYTWY\nHLA-B39:71,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:72,YCSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:73,YYSEYRNICTNTDESNLYFRYNFYTWAVLTYTWY\nHLA-B39:74,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYLWY\nHLA-B39:75,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:76,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B39:77,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:78,YYSEYRDICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:79,YYSEYRNICTNTDESNLYLWYNFYTWAVLTYTWY\nHLA-B39:80,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:81,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:82,YYSEYRNICTNTDESNLYLRYNFYTWAALAYTWY\nHLA-B39:83,YYSEYRNIFTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:84,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:85,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:86,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:88,YYSEYRNISTNTDESNLYLRSNFYTWAVLTYTWY\nHLA-B39:89,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:90,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B39:91,YYSEYRNICTNTDESNLYLRYDSYTWAVLTYTWY\nHLA-B39:92,YYSEYGNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:93,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:94,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:96,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:98,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B39:99,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B4001,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4002,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4003,YHTKYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B4004,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B4005,YHTKYREISTNTYESNLYLSYNYYTWAELAYLWY\nHLA-B4006,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B4007,YHTKYREIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4008,YHTKYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4009,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B4010,YYAKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4011,YHTKYREISTNTYESNLYLRYN
YYTWAVLAYEWY\nHLA-B4012,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4013,YHTKYREIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B4014,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B4015,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B4016,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B4018,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B4019,YHTKYREISTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B4020,YHTKYREISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B4021,YYAMYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4023,YHTKYREISTNTYESNLYLRYNYYSWAERAYEWY\nHLA-B4024,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B4025,YHTKYRNISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4026,YHTKYREISTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B4027,YHTKYREISTNTYESNLYLSYNNYTWAVLAYEWY\nHLA-B4028,YHTKYREISTNTYESNLYIRYNYYTWAELAYLWH\nHLA-B4029,YHTKYPEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4030,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B4031,YHTKYREISTNTYESNLYLSYDYYSLAVLAYEWY\nHLA-B4032,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B4033,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B4034,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B4035,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4036,YHTKYREISTNTYESNLYLRYNFYSLAVLAYEWY\nHLA-B4037,YHTKYREISTNTYENNLYLSYNYYTWAVLAYEWY\nHLA-B4038,YHTKYREISTNTYESNLYLRYNSYSLAVLAYEWY\nHLA-B4039,YHTKYREISTNTYESNLYLSYNYYTWAVLAYTWY\nHLA-B4040,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4042,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B4043,YHTKYREISTNTDESNLYLRYNYYSLAVLAYEWY\nHLA-B4044,YHTKYREISTNTYESNLYWTYDYYTWAVLAYEWY\nHLA-B4045,YHTKYREISTNTYESNLYLSYNYYSWAVLAYEWY\nHLA-B4046,YHTEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4047,YHTKYREISTNTYENTAYLRYNYYSLAVLAYEWY\nHLA-B4048,YHTKYREISTNTYESNLYLRYNLYSLAVLAYEWY\nHLA-B4049,YYTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4050,YHTKYREISTNTDESNLYLSYNYYTWAVLAYEWY\nHLA-B4051,YHTKYREISTNTYESNLYLRYNYYSWAELAYTWH\nHLA-B4052,YHTKYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B4053,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B4054,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4055,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4056,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4057,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4058,YYAKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4059,YHTKYREISTNTYESNLYIRYDSYSLAVLAYEWY\nHLA-B4060,YHTKYREISTNTYESNLYLRSDSYSLAVLAYEWY\nHLA-B4061,YHTKYREIYTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4062,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4063,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B4064,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B4065,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4066,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4067,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4068,YHTKYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B4069,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4070,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B4071,YHTKYREISTNTYESNLYLSYNLYTWAVLAYLWY\nHLA-B4072,YHTKYREISTNTYESNLYLRYNYYTLAVLAYEWY\nHLA-B4073,YHTKYREISTNTYVSNLYLRYNYYSLAVLAYEWY\nHLA-B4074,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4075,YHTKYREICTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B4076,YHTKYREISTNTYESKLYLRYNYYSLAVLAYEWY\nHLA-B4077,YHTKYREISTNTYESNLYLRYNFYTLAVLAYEWY\nHLA-B40:01,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:02,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:03,YHTKYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B40:04,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B40:05,YHTKYREISTNTYESNLYLSYNYYTWAELAYLWY\nHLA-B40:06,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:07,YHTKYREIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:08,YHTKYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:09,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B40:10,YYAKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:100,YHTKYREISTNTYESNLYLRFNYYSLAVLAYEWY\nHLA-B40:101,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:102,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:103,YHTKYREISTNTYESNL
YWTYNYYTWAVLAYEWY\nHLA-B40:104,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:105,YHTKYREISTNTYESNLYLSYNSYTWAVLAYEWY\nHLA-B40:106,YHTKYRNIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:107,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:108,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:109,YHTKYREISTNTYESIAYWTYNYYTWAVLAYEWY\nHLA-B40:11,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B40:110,YYTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B40:111,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:112,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:113,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWH\nHLA-B40:114,YHTKYREISTNTYESNLYLRYNYYSWAVLAYEWY\nHLA-B40:115,YHTKYWEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:116,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:117,YHTKYREISTNTYENIAYLRYNYYSLAVLAYEWY\nHLA-B40:119,YHTKYREISTNTYDSNLYLSYNYYTWAVLAYEWY\nHLA-B40:12,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:120,YHTKYREISTNTYESNLYIRYDYYTWAVLAYEWY\nHLA-B40:121,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B40:122,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:123,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:124,YHTKYREISTNTYESNLYLRYHDYSLAVLAYEWY\nHLA-B40:125,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:126,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:127,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:128,YHTKYREISTNTYESNLYLRYNYYSLAVRAYEWY\nHLA-B40:129,YYTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B40:13,YHTKYREIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B40:130,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:131,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:132,YHTKYREISTNTYESNLYLRYNYYSLAVLAYESY\nHLA-B40:134,YHTKYREISTNIYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:135,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:136,YHTKYREISTNTYESNLYLRYNYYTWAVDAYEWY\nHLA-B40:137,YYAMYREISTNTYESNLYIRYNYYSLAVLAYEWY\nHLA-B40:138,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:139,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:14,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B40:140,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:141,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:143,YHTKYREISTNTYESNLYLSFNYYTWAVLAYEWY\nHLA-B40:145,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:146,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:147,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:148,YHTKYREISTNTYESNLYWTYNYYTWAELAYEWY\nHLA-B40:149,YHSKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:15,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B40:150,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:151,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:152,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:153,YHTKYREISTNTYESNLYLRYNYYSLTVLAYEWY\nHLA-B40:154,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:155,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:156,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:157,YHTKYREISTNTYENTAYLSYNYYTWAVLAYEWY\nHLA-B40:158,YHTKYREISTNTYESNLYLRYNSYTWAELAYEWY\nHLA-B40:159,YHTKYREISTNTYESNLYWTYDSYTWAVLAYEWY\nHLA-B40:16,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B40:160,YHTKYREISTNTYESNLYIRYNYYSLAVLAYEWY\nHLA-B40:161,YHTKYREISTNTYESNLYWTYNYYTWAERAYEWY\nHLA-B40:162,YHTKYREIFTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:163,YHTKYREISTNTYESNLYLRYNYYSLAVLAYTWH\nHLA-B40:164,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:165,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:166,YHTKYRNIYAQTDESNLYLRYNYYSLAVLAYEWY\nHLA-B40:167,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:168,YHTKYREISTNTYESNLSLRYNYYSLAVLAYEWY\nHLA-B40:169,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:170,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:171,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:172,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:173,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:174,YHTKYREISTNTYESNLYLSYNYYTWAELAYLWH\nHLA-B40:175,YHTKYREISTNTYESNLCLRYNYYSL
AVLAYEWY\nHLA-B40:176,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:177,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:178,YHTKYREISTNTYESNLYLRYNYYSLAVLAYESY\nHLA-B40:179,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:18,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B40:180,YHTKYREISTNTYESNLYLSYHDYTWAVLAYEWY\nHLA-B40:181,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:182,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:183,YHTKYREISTNTYESNLYLRYNYYSLAELAYEWY\nHLA-B40:184,YHTKYREISTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B40:185,YHTTYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:186,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:187,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:188,YHTKYREISTNTYEDTLYLRYNYYSLAVLAYEWY\nHLA-B40:189,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:19,YHTKYREISTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B40:190,YHTKYREISTNTYESNLHWTYNYYTWAVLAYEWY\nHLA-B40:191,CHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:192,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:193,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:194,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:195,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:196,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:197,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:198,YHTKYREISTNTYESNLYLRYNYYSLAERAYEWY\nHLA-B40:199,YHTKYREISTNTYESNLYLRYNYYSLAVLAYTWY\nHLA-B40:20,YHTKYREISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B40:200,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:201,YHTKYREISTNTYESTAYLSYNYYTWAVLAYEWY\nHLA-B40:202,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:203,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:204,YHTKYREVSTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:205,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:206,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:207,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:208,YHTRYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:209,YHTTYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:21,YYAMYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:210,YYTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:211,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:212,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:213,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:214,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:215,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:217,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:218,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:219,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:220,YHTKYREIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:221,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:222,YHTKYREISTNTYESNLYLRYNSYSLAVLAYEWY\nHLA-B40:223,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:224,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:225,YHTKYREISTNTYESNLYLNYNYYTWAVLAYEWY\nHLA-B40:226,YHTKYREISTKTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:227,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:228,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:229,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:23,YHTKYREISTNTYESNLYLRYNYYSWAERAYEWY\nHLA-B40:230,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:231,YHTKYREISTNTYESNLYLRYNYYSWAVDAYEWY\nHLA-B40:232,YHTKYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:233,YHTKYREISTNTYESNLYFRYNYYSLAVLAYEWY\nHLA-B40:234,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:235,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:236,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:237,YHTKYREISTNTYESNLYLRYNYYTLAVLAYEWY\nHLA-B40:238,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:239,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:24,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B40:240,YHTKYREISTNTYESNLYLRYNDYSLAVLAYEWY\nHLA-B40:241,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:242,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:243,YHTKYREISTNTYESNLYLWCNYYTWAVLAYEWY\nHLA-B40:244,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\
nHLA-B40:245,YHTKYREISTNTYESNLYLRSNYYSLAVLAYEWY\nHLA-B40:246,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:247,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:248,YHAKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:249,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:25,YHTKYRNISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:250,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:251,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:252,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:253,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:254,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:255,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:257,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:258,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:259,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:26,YHTKYREISTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B40:260,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:261,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:262,YHTKYREISTNTYESNLYLRYNYYSLALLAYEWY\nHLA-B40:264,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:266,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:267,YHTKYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B40:268,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:269,YHTKYREISTNTYESNLYWTYNYYTWAVLAYECY\nHLA-B40:27,YHTKYREISTNTYESNLYLSYNNYTWAVLAYEWY\nHLA-B40:270,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:271,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:272,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:273,YHIKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:274,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:275,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:276,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:277,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:278,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:279,YHTKYREISTNTYESNLYLRYNYYSLAVDAYEWY\nHLA-B40:28,YHTKYREISTNTYESNLYIRYNYYTWAELAYLWH\nHLA-B40:280,YHTKYREISTNTYESNLYLRYNFYSLAVLAYEWY\nHLA-B40:281,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:282,YHAKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:283,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:284,YHTKYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B40:285,YHTKYREISTNTYESNLYLRYNYYSVAVLAYEWY\nHLA-B40:287,YHTKYREISTNTYESNLYLSYNYYTLAVLAYEWY\nHLA-B40:288,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:289,YHTKYREISTNTYESNLHLSYNYYTWAVLAYEWY\nHLA-B40:29,YHTKYPEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:290,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:292,YHTKYREISTNTYESIAYLSYNYYTWAVLAYEWY\nHLA-B40:293,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:294,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:295,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:296,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:297,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:298,YHTKYREISTNTYESNLYWTYNYYSLAVLAYEWY\nHLA-B40:299,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:30,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B40:300,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:301,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:302,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:303,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:304,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:305,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:306,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:307,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:308,YHTTYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:309,YHTKYREISTNTYESNLYLSYNYYTWAELAYEWY\nHLA-B40:31,YHTKYREISTNTYESNLYLSYDYYSLAVLAYEWY\nHLA-B40:310,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:311,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:312,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:313,YHTKYRNISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:314,YHTKYREISTNTYESNLYLSYNFYTWAVLAYEWY\nHLA-B40:315,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:316,YHTKYREISTNTYESNLYLTYNYYSLAVLAYEWY\nHLA-B40:3
17,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:318,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:319,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:32,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B40:320,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:321,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:322,YHTKYREICTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:323,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:324,YHTKYREISTNTYESNLYLRYNYYSLAVWAYLWY\nHLA-B40:325,YYAKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:326,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:327,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:328,YDTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:329,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:33,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B40:330,YHTKYREISTNTYESNLYWTYNFYTWAVLAYEWY\nHLA-B40:331,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:332,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:333,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:334,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:335,YHTKYREISTNTYESHLYLSYNYYTWAVLAYEWY\nHLA-B40:336,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:339,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:34,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B40:340,YHTKYREISTNTYENIAYWTYNYYTWAVLAYEWY\nHLA-B40:341,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:342,YYTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:343,YHTKYREISTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B40:344,YHTKYREISTNTYERNLYLRYNYYSLAVLAYEWY\nHLA-B40:346,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:347,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:348,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:349,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:35,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:350,YHTTYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:351,YHAKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B40:352,YHTKYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B40:353,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:354,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:355,YHTKYREISTNTYESNLYLRYNYYSWAELAYEWY\nHLA-B40:356,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:357,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:358,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:359,YHTKYREISTNTYESNLYLSYNYYTWVVLAYEWY\nHLA-B40:36,YHTKYREISTNTYESNLYLRYNFYSLAVLAYEWY\nHLA-B40:360,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:362,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:363,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:364,YHAKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:365,YYATYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:366,YHTKYREISTNTYESNLYWRYNYYTWAVLAYEWY\nHLA-B40:367,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:368,YHTKYREISTNTYESNLYLRYNFYSLAVLAYEWY\nHLA-B40:369,YHTKYREIPTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:37,YHTKYREISTNTYENNLYLSYNYYTWAVLAYEWY\nHLA-B40:370,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:371,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:373,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:374,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:375,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:376,YHTKYREISTNTYESNLYWTYNYYTWAALAYEWY\nHLA-B40:377,YHTKYREISTNTYESNLYLRYNLYSLAVLAYEWY\nHLA-B40:378,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:379,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:38,YHTKYREISTNTYESNLYLRYNSYSLAVLAYEWY\nHLA-B40:380,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:381,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:382,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:383,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:384,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:385,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:386,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:387,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:388,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:389,YHTKYRE
ISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:39,YHTKYREISTNTYESNLYLSYNYYTWAVLAYTWY\nHLA-B40:390,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:391,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:392,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:393,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:394,YHTKYREISTNTYESIAYLRYNYYSLAVLAYEWY\nHLA-B40:395,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:396,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:397,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:398,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:40,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:400,YHTKYREISTNTYESNQYLRYNYYSLAVLAYEWY\nHLA-B40:401,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:402,YHTKYREISTNTYESNLYLRYNYYSLAVLAYECY\nHLA-B40:403,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:404,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:406,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:407,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:408,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:409,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:410,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:411,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:412,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:413,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:414,YHTKYREISTNTYESNLYLSYDDYTWAVLAYEWY\nHLA-B40:42,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B40:43,YHTKYREISTNTDESNLYLRYNYYSLAVLAYEWY\nHLA-B40:44,YHTKYREISTNTYESNLYWTYDYYTWAVLAYEWY\nHLA-B40:45,YHTKYREISTNTYESNLYLSYNYYSWAVLAYEWY\nHLA-B40:46,YHTEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:47,YHTKYREISTNTYENTAYLRYNYYSLAVLAYEWY\nHLA-B40:48,YHTKYREISTNTYESNLYLRYNLYSLAVLAYEWY\nHLA-B40:49,YYTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:50,YHTKYREISTNTDESNLYLSYNYYTWAVLAYEWY\nHLA-B40:51,YHTKYREISTNTYESNLYLRYNYYSWAELAYTWH\nHLA-B40:52,YHTKYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B40:53,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:54,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:55,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:56,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:57,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:58,YYAKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:59,YHTKYREISTNTYESNLYIRYDSYSLAVLAYEWY\nHLA-B40:60,YHTKYREISTNTYESNLYLRSDSYSLAVLAYEWY\nHLA-B40:61,YHTKYREIYTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:62,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:63,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B40:64,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B40:65,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:66,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:67,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:68,YHTKYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B40:69,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:70,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:71,YHTKYREISTNTYESNLYLSYNLYTWAVLAYLWY\nHLA-B40:72,YHTKYREISTNTYESNLYLRYNYYTLAVLAYEWY\nHLA-B40:73,YHTKYREISTNTYVSNLYLRYNYYSLAVLAYEWY\nHLA-B40:74,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:75,YHTKYREICTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:76,YHTKYREISTNTYESKLYLRYNYYSLAVLAYEWY\nHLA-B40:77,YHTKYREISTNTYESNLYLRYNFYTLAVLAYEWY\nHLA-B40:78,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:79,YHTKYREISTNTYESNLHLRYNYYSLAVLAYEWY\nHLA-B40:80,YHTKYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B40:81,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:82,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:83,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:84,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:85,YHTKYREISTNTYESNLYLSYNYYIWAVLAYEWY\nHLA-B40:86,YHTKYREISTNTYESNLYWTYNFYTWAVLAYEWY\nHLA-B40:87,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:88,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B40:89,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:90,YHTKYREISTNTYESNLYLSYNYYTWAVLAHEWY\nHLA-B40:91,YHTKYREISTNTYE
SNLYLSYNYYTWAVLAYEWY\nHLA-B40:92,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B40:93,YHTEYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B40:94,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:95,YHTKYREISTNTYESNLYWTYNYYTWAELAYEWY\nHLA-B40:96,YHTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B40:97,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B40:98,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B40:99,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B4101,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B4102,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B4103,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B4104,YHTKYREISTNTYESNLYLSYDYYTWAVDAYTWY\nHLA-B4105,YHTKYREISTNTYESKLYWRYNYYTWAVDAYTWY\nHLA-B4106,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B4107,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B4108,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B41:01,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:02,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:03,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B41:04,YHTKYREISTNTYESNLYLSYDYYTWAVDAYTWY\nHLA-B41:05,YHTKYREISTNTYESKLYWRYNYYTWAVDAYTWY\nHLA-B41:06,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:07,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:08,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B41:09,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:10,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:11,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:12,YHAKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:13,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:14,YHTKYREISTNTYESNLHWRYNYYTWAVDAYTWY\nHLA-B41:15,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:16,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:17,YHTKYREISTNTYESNLYWRYNYYTWAVDAYLWY\nHLA-B41:18,YHTKYREISTNTYESNLYLSYDFYTWAVDAYTWY\nHLA-B41:19,YHTKYREISTNTYESNLYLSYNYYTWAVDAYEWY\nHLA-B41:20,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:21,YHTKYREISTNTYESNLYWRYDYYTWAVDAYTWY\nHLA-B41:22,YHSKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:23,YHTKYREISNNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:24,YHTKYREISTNTYESNLYLSYNSYTWAVDAYTWY\nHLA-B41:25,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:26,YHTKYREISTNTYESNQYWRYNYYTWAVDAYTWY\nHLA-B41:27,YHTKYREISTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B41:28,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:29,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:30,YHTKYREISTNTYESNLYWRYNSYTWAVDAYTWY\nHLA-B41:31,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:32,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:33,YYTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:34,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:35,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:36,YHAKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:37,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:38,YHTKYREISTNTYESNLYLSYNYYTLAVDAYTWY\nHLA-B41:39,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:40,YHTKYREISTNTYESNLYLSYNYYTWAMDAYTWY\nHLA-B41:41,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:42,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:43,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:44,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:46,YHTKYREIFTNTYENIAYLSYNYYTWAVDAYTWY\nHLA-B41:47,YYTKYREISTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B41:48,YHTKYREISTNTYESNLYIRYNYYTWAVDAYTWY\nHLA-B41:49,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:50,YHTKYREISTNTYESNLYWRYNFYTWAVDAYTWY\nHLA-B41:51,YHTKYREISTNTYESNLYLSYDSYTWAVDAYTWY\nHLA-B41:52,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:53,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:54,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B41:55,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B41:56,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B4201,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B4202,YHSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B4204,YYSEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B4205,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B4206,YYSEYRNIYA
QTDESNLYLSYNFYTWAVDAYTWY\nHLA-B4207,YYSEYRNIYTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B4208,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B4209,YHSEYRNIYAQTDESNLYLSYDSYTWAVDAYTWY\nHLA-B42:01,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:02,YHSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:04,YYSEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B42:05,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:06,YYSEYRNIYAQTDESNLYLSYNFYTWAVDAYTWY\nHLA-B42:07,YYSEYRNIYTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:08,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:09,YHSEYRNIYAQTDESNLYLSYDSYTWAVDAYTWY\nHLA-B42:10,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:11,YYSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B42:12,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:13,YYSEYRNIYAQTDESNLYIRYNYYTWAVDAYTWY\nHLA-B42:14,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:15,YYSEYRNIYAQTDENNLYLSYNYYTWAVDAYTWY\nHLA-B42:16,YYSEYRNIYAQTDESNLYLSYDYYTWAVDAYTWY\nHLA-B42:17,YHSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:18,YHSEYRNIYAQADESNLYLSYNYYTWAVDAYTWY\nHLA-B42:19,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYTWY\nHLA-B42:20,YYSEYRNIYAQTDESNLYLSYNFYTWAVDAYTWY\nHLA-B42:21,YYAEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:22,YYSEYRNIYAQTDESKLYLSYNYYTWAVDAYTWY\nHLA-B42:23,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:24,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B42:25,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B4402,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4403,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4404,YYTKYREISTNTYENTAYIRYDDYTWAVRAYTSY\nHLA-B4405,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B4406,YYATYRNIFTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B4407,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4408,YYTMYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4409,YYTKYREISTNTYESNLYIRYDDYTWAVDAYLSY\nHLA-B4410,YYTKYREISTNTYENTAYIRFNLYTWAVLAYLSY\nHLA-B4411,YYTKYREISTNTYENTPYIRYDDYTWAVDAYLSY\nHLA-B4412,YYTKYRNISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4413,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4414,YYTKYREISTNTYENTAYIRYNDYTWAVDAYLSY\nHLA-B4415,YHTKYREISTNTYESTAYWRYNLYTWAVDAYLSY\nHLA-B4416,YYTKYREISTNTYENTAYIRYDDYTWAVDAYEWY\nHLA-B4417,YYTKYREISTNTYENTAYIRYDSYTWAVDAYLSY\nHLA-B4418,YHTKYREISTNTYENIAYWRYNLYTWAVDAYLSY\nHLA-B4420,YYTKYREISTNTYENTAYWTYDDYTWAVDAYLSY\nHLA-B4421,YYTKYREISTNTYENTAYIRYDDYTWAVDAYESY\nHLA-B4422,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4424,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4425,YYTKYREISTNTYENIAYIRYDYYTWAVDAYLSY\nHLA-B4426,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4427,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4428,YYTKYREISTNTYENTAYIRYDDYTWAVRAYLSY\nHLA-B4429,YYTKYREISTNTYENTAYIRYDDYTWAVLTYLSY\nHLA-B4430,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4431,YYTKYREISTNTYENTAYLRYNYYSLAVLAYESY\nHLA-B4432,YYTKYPEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4433,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4434,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4435,YYTKYREISTNTYENTAYIRYDDYTWAVEAYLSY\nHLA-B4436,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4437,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLWY\nHLA-B4438,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4439,YYPKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4440,YYTEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4441,YYTKYREISTNTYENTAYLRYDDYTWAVDAYLSY\nHLA-B4442,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLWY\nHLA-B4443,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLSY\nHLA-B4444,YYTEYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4445,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B4446,YYTKYREISTNTYESNLYIRYDDYTWAVLAYLSY\nHLA-B4447,YYTKYREISTNTYENTAYWTYDDYTWAVLAYLSY\nHLA-B4448,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4449,YDTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4450,YYTKYREISTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B4451,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4453,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4454
,YYTKYREISTNTYENTAYLSYDDYTWAVLAYLSY\nHLA-B44:02,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:03,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:04,YYTKYREISTNTYENTAYIRYDDYTWAVRAYTSY\nHLA-B44:05,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:06,YYATYRNIFTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B44:07,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:08,YYTMYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:09,YYTKYREISTNTYESNLYIRYDDYTWAVDAYLSY\nHLA-B44:10,YYTKYREISTNTYENTAYIRFNLYTWAVLAYLSY\nHLA-B44:100,YYTKYREISTNTYENTAYWRYDDYTWAVDAYLSY\nHLA-B44:101,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:102,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:103,YHTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:104,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:105,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:106,YYTKYREISTNTYENTAYLSYDDYTWAVDAYLSY\nHLA-B44:107,YYTKYREISTNTYENTAYIRYDYYTWAVLAYLSY\nHLA-B44:109,YYTKYREISTNTYESTAYIRYDDYTWAVLAYLSY\nHLA-B44:11,YYTKYREISTNTYENTPYIRYDDYTWAVDAYLSY\nHLA-B44:110,YYTKYREISTNTYENTAYISYDDYTWAVLAYLSY\nHLA-B44:111,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:112,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:113,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:114,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:115,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:116,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:117,YYTKYREISTNTYENTAYIRYDFYTWAVDAYLSY\nHLA-B44:118,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:119,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:12,YYTKYRNISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:120,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:121,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:122,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:123,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:124,YYTKYREISTNTYENTAYIRYDDYTWAVLAYESY\nHLA-B44:125,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:126,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:127,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:128,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:129,YYTKYREISTNTYESNLYIRYDDYTWAVLAYLSY\nHLA-B44:13,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:130,YYSEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:131,YHTKYREISTNTYESNLYIRYDDYTWAVDAYLSY\nHLA-B44:132,YYTKYREISTNTYENTAYIRYDDYTWAVDAYTWY\nHLA-B44:133,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:134,YYTTYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:135,YYTKYREISTNTYENTAYLSYDDYTWAVLAYLSY\nHLA-B44:136,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:137,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:139,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:14,YYTKYREISTNTYENTAYIRYNDYTWAVDAYLSY\nHLA-B44:140,YYTKYREISTNTYENTAYIRYNLYTWAVDAYLSY\nHLA-B44:141,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:142,YYTKYREISTNTYENTAYIRYDDYTWAVDACLSY\nHLA-B44:143,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:144,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLSY\nHLA-B44:145,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:146,YYTKYREISTNTYENTAYIRYDDYTWAEWAYLSY\nHLA-B44:147,YYTKYREISTNTYENTAYIRYDDYTWAVLAHLSY\nHLA-B44:148,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:15,YHTKYREISTNTYESTAYWRYNLYTWAVDAYLSY\nHLA-B44:150,YYTKYREISTNTYENTAYIRYDDYTWAERAYEWY\nHLA-B44:151,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:152,YYTKYREISTNTYENTTYIRYDDYTWAVDAYLSY\nHLA-B44:153,YYTKYREISTNTYENTAYIVYDDYTWAVLAYLSY\nHLA-B44:154,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:155,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:156,YDSEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:157,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:158,YYTKYREISTNTYENTAYLSYDYYTWAVDAYLSY\nHLA-B44:159,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:16,YYTKYREISTNTYENTAYIRYDDYTWAVDAYEWY\nHLA-B44:161,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:162,YYTKYREISTNTYENTA
YIRYDDYTWAVDAYLSY\nHLA-B44:163,YYTKYREISTNTYENTAYIRYDDYTWAVLAYTSY\nHLA-B44:164,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:165,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:166,YYTKYREISTNTYENTAYLSYNYYTWAVDAYTWY\nHLA-B44:167,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:168,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:169,YYTKYREISTNTYENTAYIRYDDYSWAVDAYLSY\nHLA-B44:17,YYTKYREISTNTYENTAYIRYDSYTWAVDAYLSY\nHLA-B44:170,YYTKYREISTNTYENAAYIRYDDYTWAVDAYLSY\nHLA-B44:172,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:173,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:174,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:175,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:176,YYTKYREISTNTYESTAYIRYDDYTWAVDAYLSY\nHLA-B44:177,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:178,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:179,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:18,YHTKYREISTNTYENIAYWRYNLYTWAVDAYLSY\nHLA-B44:180,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:181,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:182,YYTKYREISTNTYENTAYIRYDDYSWAVLAYLSY\nHLA-B44:183,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:184,YYTKYREISTNTYENTAYLSYNYYTWAVLAYLSY\nHLA-B44:185,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:186,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:187,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:188,YHTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:189,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:190,YYTKYREISTNTYENTAYIRYDDYTWAVDAYTWH\nHLA-B44:191,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:192,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:193,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:194,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:196,YYTKYREISTNTYENTGYIRYDDYTWAVDAYLSY\nHLA-B44:197,YYTKYREISTNTYENTAYWRYDDYTWAVLAYLSY\nHLA-B44:199,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:20,YYTKYREISTNTYENTAYWTYDDYTWAVDAYLSY\nHLA-B44:200,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:201,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:202,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:203,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSH\nHLA-B44:204,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:205,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:206,YYTKYREISTNTDENTAYIRYDDYTWAVDAYLSY\nHLA-B44:207,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:208,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:209,YYTKYREISTNTYENTAYIRSDDYTWAVLAYLSY\nHLA-B44:21,YYTKYREISTNTYENTAYIRYDDYTWAVDAYESY\nHLA-B44:210,YYTEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:211,YYTKYREISTNTYENTAYISYDDYTWAVDAYLSY\nHLA-B44:212,YHTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:213,YYTKYREISTNTYENTAYLSYNYYTWAVDAYLSY\nHLA-B44:214,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:215,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:216,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:218,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:219,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:22,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:220,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:221,YYSKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:222,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:223,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:224,YYSKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:225,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:226,YYTKYREISTNTYENTAYIRYDLYTWAVDAYLSY\nHLA-B44:227,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:228,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:229,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:230,YYTKYREISTNTYENTAYLRYDSYTWAVDAYLSY\nHLA-B44:231,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:232,YYTKYRNIFTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:233,YYTKYREISTNTYENTAYIRYDDYNWAVLAYLSY\nHLA-B44:234,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLLY\nHLA-B44:235,YYTKYREISTNTYENTAYIRYDDYT
WAVDAYLSY\nHLA-B44:236,YYTKYREISTNTYENTAYLRYDYYTWAVDAYLSY\nHLA-B44:238,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:239,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:24,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:240,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:241,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:242,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:243,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:244,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:245,YYAKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:247,YYTKYREISTNTYENTAYIRYDDYTLAALAYLSY\nHLA-B44:248,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:249,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:25,YYTKYREISTNTYENIAYIRYDYYTWAVDAYLSY\nHLA-B44:250,YYTKYREISTNTYENTAYIRYDYYTWAVLAYLSY\nHLA-B44:251,YYTKYREISTNTYENTAYIRYDFYTWAVLAYLSY\nHLA-B44:252,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:253,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:254,YYTKYREISTNTYENNLYIRYDDYTWAVDAYLSY\nHLA-B44:255,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:256,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLSY\nHLA-B44:257,YYTKYREISTNTYEDTLYIRYDDYTWAVDAYLSY\nHLA-B44:258,YYSKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:259,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:26,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:260,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:261,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:262,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:263,YYTKYRNICTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:264,YYTKYREISTNTYENTAYMRYDYYTWAVDAYLSY\nHLA-B44:265,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:266,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:268,YYTKYREISTNTYENTAYIRYDDYTWAVLAYTWY\nHLA-B44:269,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:27,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:270,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:271,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:272,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:273,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:274,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:275,YYTKYREISTNTYENTAYIRYDDYTWAVWAYLWH\nHLA-B44:276,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B44:277,YYTKYREISTNTDESNLYIRYDDYTWAVLAYLSY\nHLA-B44:278,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:279,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:28,YYTKYREISTNTYENTAYIRYDDYTWAVRAYLSY\nHLA-B44:280,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:281,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:282,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:283,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:284,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:285,YYTKYREISTNTYENTAYIRYDDYTWAVLTYLSY\nHLA-B44:286,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:287,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:288,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:289,YYTKYREISTNTYENTAYIRYDDYTWVVDAYLSY\nHLA-B44:29,YYTKYREISTNTYENTAYIRYDDYTWAVLTYLSY\nHLA-B44:290,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLWH\nHLA-B44:291,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:292,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:293,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:294,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:295,YYTKYREISTNTYENTAYIRYDDYTWAALAYLSY\nHLA-B44:296,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:297,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:298,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:299,YYTKYREISTNTYENTAYIRYDDYTWAVWAYLSY\nHLA-B44:30,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:300,YYTKYREISTNTYKNTAYIRYDDYTWAVLAYLSY\nHLA-B44:301,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:302,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:304,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:305,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:307,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\
nHLA-B44:308,YYTKYREISTNTYENTAYIRYDSYTWAVDAYLSY\nHLA-B44:31,YYTKYREISTNTYENTAYLRYNYYSLAVLAYESY\nHLA-B44:311,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:312,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:313,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:315,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:316,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:317,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:318,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSC\nHLA-B44:319,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:32,YYTKYPEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:320,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:321,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:322,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:323,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:324,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:325,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:326,YYTKYPEISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:327,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:329,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:33,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:330,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:331,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:332,YYTKYRELSTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:34,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:35,YYTKYREISTNTYENTAYIRYDDYTWAVEAYLSY\nHLA-B44:36,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:37,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLWY\nHLA-B44:38,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:39,YYPKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:40,YYTEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:41,YYTKYREISTNTYENTAYLRYDDYTWAVDAYLSY\nHLA-B44:42,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLWY\nHLA-B44:43,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLSY\nHLA-B44:44,YYTEYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:45,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:46,YYTKYREISTNTYESNLYIRYDDYTWAVLAYLSY\nHLA-B44:47,YYTKYREISTNTYENTAYWTYDDYTWAVLAYLSY\nHLA-B44:48,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:49,YDTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:50,YYTKYREISTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B44:51,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:53,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:54,YYTKYREISTNTYENTAYLSYDDYTWAVLAYLSY\nHLA-B44:55,YHTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:57,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:59,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:60,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:62,YYTKYREISTNTYENTAYIRYNYYTWAVDAYLSY\nHLA-B44:63,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:64,YYTKYREISTNTYENTAYIRYDDYTWAVLAYEWY\nHLA-B44:65,YYTKYREISTNTYENTAYLRYDDYTWAVLAYLSY\nHLA-B44:66,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:67,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:68,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:69,YYTKYWEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:70,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:71,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:72,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:73,YYTKYREISTNTYENTAYIRYDDYTWAVDGYLSY\nHLA-B44:74,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:75,YYTKYREISTNTYENNLYIRYDYYTWAVDAYLSY\nHLA-B44:76,YYTKYREISTNTYENTAYIRYDDYTWAERAYLSY\nHLA-B44:77,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B44:78,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B44:79,YYTKYREISTNTYENTAYIRYDDYTWAELAYLSY\nHLA-B44:80,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:81,YYTNYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:82,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B44:83,YYATYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:84,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:85,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:86,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:87,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:88,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:
89,YYTKYREISTNTYENTAYIRYDDYTWAVDTYLSY\nHLA-B44:90,YYTKYREIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-B44:91,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLWY\nHLA-B44:92,YYTKYREISTNTYENTAYIRYDDYTWAMLAYLSY\nHLA-B44:93,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B44:94,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:95,YYTKYREISTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B44:96,YYTKYREISTNTYENTAYIGYDDYTWAVLAYLSY\nHLA-B44:97,YYTKYREICAKTDENTAYIRYDDYTWAVDAYLSY\nHLA-B44:98,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B44:99,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B4501,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B4502,YHTKYREISTNTYESNLYWRYNFYTWAVDAYLSY\nHLA-B4503,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B4504,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLWY\nHLA-B4505,YHTKYREISTNTYESNLYWRYNLYTWAVDVYLSY\nHLA-B4506,YHTKYREIYAQTDESNLYWRYNLYTWAVDAYLSY\nHLA-B4507,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:01,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:02,YHTKYREISTNTYESNLYWRYNFYTWAVDAYLSY\nHLA-B45:03,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:04,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLWY\nHLA-B45:05,YHTKYREISTNTYESNLYWRYNLYTWAVDVYLSY\nHLA-B45:06,YHTKYREIYAQTDESNLYWRYNLYTWAVDAYLSY\nHLA-B45:07,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:08,YHTKYREISTNTYESNLYWRYNLYTWAVDAYTWY\nHLA-B45:09,YHTKYREISTNTYESNLYWRYDSYTWAVDAYLSY\nHLA-B45:10,YHTKYREISTNTYESNLYWRYNLYTWAVDAYEWY\nHLA-B45:11,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:12,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:13,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:14,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:15,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:16,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:17,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:18,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:19,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:20,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:21,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:22,YHTTYRNISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:23,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B45:24,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B4601,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B4602,YYAMYREKYRQTGVSNLYLRYDSYTWAEWAYLWY\nHLA-B4603,YYAMYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-B4604,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B4605,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B4606,YYAMYREKYRQTDVSNLYLRYDSYSLAVLAYEWY\nHLA-B4608,YYAMYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-B4609,YYAMYREKYRQTDVSNLYLRYDSYTWAVWAYLWY\nHLA-B4610,YYTMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B4611,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYLWY\nHLA-B46:01,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:02,YYAMYREKYRQTGVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:03,YYAMYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-B46:04,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:05,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:06,YYAMYREKYRQTDVSNLYLRYDSYSLAVLAYEWY\nHLA-B46:08,YYAMYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-B46:09,YYAMYREKYRQTDVSNLYLRYDSYTWAVWAYLWY\nHLA-B46:10,YYTMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:11,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYLWY\nHLA-B46:12,YYAMYREKYRQTDVSNLYLSYDSYTWAEWAYLWY\nHLA-B46:13,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-B46:14,YHAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:16,YYAMYREKFRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:17,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLSY\nHLA-B46:18,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYTWY\nHLA-B46:19,YYAMYREKYRQTDVSNLYLRYDSYTWAVLTYLWY\nHLA-B46:20,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:21,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYTWY\nHLA-B46:22,YYAMYREKYRRTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:23,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:24,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:25,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYEWY\nHLA-B46:26,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:
27,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:28,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:29,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYTWY\nHLA-B46:30,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYEWY\nHLA-B46:31,YYAMYREKHRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:32,YYAMYREKYRQTDVSNLYWTYDSYTWAEWAYLWY\nHLA-B46:33,YYAMYREKYRQTDVSNLYIRYDSYTWAVLAYLWY\nHLA-B46:34,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:35,YYAMYREKYRQTHVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:36,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:37,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:38,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:39,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:40,YYATYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:42,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:43,YYAMYREKYRQTDVSNLYLRYDSYSLAVLAYLWY\nHLA-B46:44,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:45,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:46,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:47,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:48,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:49,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:50,YYAMYREKYRQTYVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:52,YYAMYREKYRQTDVSNLHLRYDSYTWAEWAYLWY\nHLA-B46:53,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:54,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:56,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:57,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:58,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:59,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:60,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:61,YYAMYREKYRQTDVSNLYLRYNFYTWAEWAYLWY\nHLA-B46:62,YYAMYREKYRQTDVSNLYLRYDSYTWAERAYLWY\nHLA-B46:63,YYAMYREKYRQTDVSNLYLRYDSCTWAEWAYLWY\nHLA-B46:64,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:65,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:66,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:67,YYAMYREKYRQTDVSNLYIRYDSYTWAEWAYLWY\nHLA-B46:68,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:69,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:70,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:71,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:72,YYAMYREKYRQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B46:73,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B46:74,YYAMYREKYRQTDVSNLYLRYNYYTWAEWAYLWY\nHLA-B46:75,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B4701,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B4702,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B4703,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B4704,YYTKYREISTNTYENTAYLNYHDYTWAVLAYEWY\nHLA-B4705,YYTKYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B47:01,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B47:02,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B47:03,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B47:04,YYTKYREISTNTYENTAYLNYHDYTWAVLAYEWY\nHLA-B47:05,YYTKYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B47:06,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B47:07,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B47:08,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYTWY\nHLA-B47:09,YYTKYREISTNTYEDTLYLRFHDYTWAALAYEWY\nHLA-B47:10,YYTKYREISTNTYEDTLYLRFHDYTWAELAYEWY\nHLA-B4801,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B4802,YYSEYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B4803,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B4804,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B4805,YYSEYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B4806,YYSEYRNIFTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B4807,YYSEYREISTNTYESNLYLSYNFYSLAVLAYEWY\nHLA-B4808,YYSEYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B4809,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B4810,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4811,YYSEYREISTNTYESNLYLSYNYYSLAVLAYELY\nHLA-B4812,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4813,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B4814,YYSEYREISTNTYESNLYLSYNSYTLAVLAYEWY\nHLA-B4815,YYSEYREISTNTYESNLYLSYNYYSLAELAYEWY\nHLA-B4816,Y
YSEYRVISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B4817,YYSEYREISTNTYESNLYIRYNFYSLAVLAYEWY\nHLA-B4818,YYSEYREISTNTYESIAYLSYNYYSLAVLAYEWY\nHLA-B48:01,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:02,YYSEYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B48:03,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B48:04,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:05,YYSEYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B48:06,YYSEYRNIFTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:07,YYSEYREISTNTYESNLYLSYNFYSLAVLAYEWY\nHLA-B48:08,YYSEYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B48:09,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:10,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B48:11,YYSEYREISTNTYESNLYLSYNYYSLAVLAYELY\nHLA-B48:12,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B48:13,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B48:14,YYSEYREISTNTYESNLYLSYNSYTLAVLAYEWY\nHLA-B48:15,YYSEYREISTNTYESNLYLSYNYYSLAELAYEWY\nHLA-B48:16,YYSEYRVISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:17,YYSEYREISTNTYESNLYIRYNFYSLAVLAYEWY\nHLA-B48:18,YYSEYREISTNTYESIAYLSYNYYSLAVLAYEWY\nHLA-B48:19,YYSEYREISTNTYESNLYLSYNYYSLAVWAYEWY\nHLA-B48:20,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:21,YYSEYREISTNTYESNLYLNYNYYSLAVLAYEWY\nHLA-B48:22,YHSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:23,YYSEYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B48:24,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:25,YYSEYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B48:26,YYSEYREISTNTYESNLYLNYHDYSLAVLAYEWY\nHLA-B48:27,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:28,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:29,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:30,YHSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:31,YYSEYREIFTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:32,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:33,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:34,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:35,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:36,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:37,YYSEYREISTNTYESNLYLSYNYYSLAVLAYTWY\nHLA-B48:38,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:39,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:40,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B48:41,YYSKYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:42,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:43,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:44,YYSEYREISTNTYESNLYLSYNYYSWAVLAYEWY\nHLA-B48:45,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:46,YYSEYREISTNTYESNLYLSYDYYSLAVLAYEWY\nHLA-B48:47,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B48:48,YYSEYREISTNTDESNLYLSYNYYSLAVLAYEWY\nHLA-B4901,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B4902,YHTKYREISTNTYENTAYWRYNLYTWAELAYLWY\nHLA-B4903,YHATYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B4904,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B4905,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B49:01,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:02,YHTKYREISTNTYENTAYWRYNLYTWAELAYLWY\nHLA-B49:03,YHATYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:04,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B49:05,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B49:06,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:07,YHTKYREISTNTYENIAYWRYNLYTWAELAYEWY\nHLA-B49:08,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:09,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWH\nHLA-B49:10,YYTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:11,YHTKYREISTNTYENIAYWRYNLYTWAVLAYLWY\nHLA-B49:12,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:13,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:14,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:16,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:17,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:18,YHTKYREISTNTYENIAYWRYNYYTWAELAYLWY\nHLA-B49:20,YHTKYREISTNTYENIAYWRYNLYTWAELAYLSY\nHLA-B49:21,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:22,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHL
A-B49:23,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:24,YYTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:25,YDSKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:26,YHTEYRNICTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:27,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:28,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:29,YHTKYREISTNTYENIAYWRYNLYTLAELAYLWY\nHLA-B49:30,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:31,YYTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:32,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:33,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:34,YHTKYREVSTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:35,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:36,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:37,YHTKYREISTNTYENIAYWRYNLYTWAELDYLWY\nHLA-B49:38,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:39,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:40,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:41,YHTKYREISTNTYENIAYWRYNLYTWAERAYLWY\nHLA-B49:42,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:43,YHTKYREISTNTYENIAYWMYNLYTWAELAYLWY\nHLA-B49:44,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:45,YHTKYREISTNTYENIAYWRYNLYTWAALAYLWY\nHLA-B49:46,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:47,YHTKYREISTNTYENIAYLRYNLYTWAELAYLWY\nHLA-B49:48,YHAKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:49,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:50,YHTKYREISTNTYENIAYWRYNFYTWAELAYLWY\nHLA-B49:51,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:52,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:53,YHTKYREIPTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:54,YHTKYREISTNTYENIAYWTYNLYTWAELAYLWY\nHLA-B49:55,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:56,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:57,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:58,YHNKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B49:59,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B5001,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B5002,YHTKYREISTNTYESNLYWRYNLYTWAELAYLSY\nHLA-B5004,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:01,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:02,YHTKYREISTNTYESNLYWRYNLYTWAELAYLSY\nHLA-B50:04,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:05,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:06,YHTRYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:07,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:08,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:09,YHTKYREISTNTYESNLYWRYNFYTWAELAYLWY\nHLA-B50:10,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:11,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:12,YHTKYREISTNTYESNLYGRYNLYTWAELAYLWY\nHLA-B50:13,YYTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:14,YHTKYREISTNTYESNLYWRYNYYTWAELAYLWY\nHLA-B50:15,YHTKYRNISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:16,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:17,YHTKYREISTNTYESNLYIRYNLYTWAELAYLWY\nHLA-B50:18,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:19,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:20,YHTKYREISTNTYESNLYWRFHDYTWAELAYLWY\nHLA-B50:31,YHTKYREISTNTYESNLYWRYNLYTWAELAYEWY\nHLA-B50:32,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:33,YHTKYREISTNTYESNLYWRYDSYTWAELAYLWY\nHLA-B50:34,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:35,YHTKYREISTNTYESNLYWRYNLYTWAELAYLRY\nHLA-B50:36,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:37,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:38,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:39,YHTKYREISTNTYESNLYWRYDLYTWAELAYLWY\nHLA-B50:40,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:41,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:42,YHTKYREISTNTYESNLYWRYNLHTWAELAYLWY\nHLA-B50:43,YYTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:44,YHTKYREISTNTYESNLYWRYNLYTRAELAYLWY\nHLA-B50:45,YHTKYREISTNTYESNLYWRYNLYTWAELAYTWY\nHLA-B50:46,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:47,YHTKYREISTNTYESNLYWRYNLY
TWAEWAYLWY\nHLA-B50:48,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:49,YHTKYREISTNTYESNLYWRYNLYTWAVLTYTWY\nHLA-B50:50,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:51,YHTKYREISTNTYESNLYLWYNLYTWAELAYLWY\nHLA-B50:52,YHTKYREISTNTDESNLYWRYNLYTWAELAYLWY\nHLA-B50:53,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:54,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:55,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:56,YHTKYREISTNTYESNLYWRYNLYTWAELAYLSY\nHLA-B50:57,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:58,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWH\nHLA-B50:59,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:60,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B50:61,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B5101,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5102,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B5103,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLGH\nHLA-B5104,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B5105,YYATYRNIFTNTYENIAYWTYNYYTWAVRAYLWY\nHLA-B5106,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWH\nHLA-B5107,YYATYRNISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5108,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B5109,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B5111,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5112,YYATYRNIFTNTYENIADWTYNYYTWAELAYLWH\nHLA-B5113,YYATYRNIFTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B5114,YYATYRNIFTNTYENIAYWTYKYYTWAELAYLWH\nHLA-B5115,YYATYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B5116,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B5117,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5118,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5119,YYATYRNIFTNTYENIAYWTYNYYTWAVLTYLWH\nHLA-B5120,YYATYRNIFTNTDENIAYWTYNYYTWAVDAYLWH\nHLA-B5121,YYATYRNIFTNTYENIAYWTYNYYTWAELAYTWH\nHLA-B5122,YYATYRNICTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5123,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLSY\nHLA-B5124,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5126,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5128,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5129,YYATYRNIFTNTYENIAYWTYNYYTWAERAYLWH\nHLA-B5130,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5131,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWH\nHLA-B5132,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5133,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5134,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWY\nHLA-B5135,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5136,YYATYRNIFTNTYENIAYWTYNYYTWAEDAYTWY\nHLA-B5137,YYATYRNIFTNTYENIAYWTYDSYTWAELAYLWH\nHLA-B5138,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5139,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5140,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B5142,YYATYRNIFTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B5143,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5145,YYATYRNIFTNTYENIAYLRYDSYTWAELAYLWH\nHLA-B5146,YYATYRNIFTNTYENIAYITYNYYTWAELAYLWH\nHLA-B5147,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B5148,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5149,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:01,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:02,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B51:03,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLGH\nHLA-B51:04,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B51:05,YYATYRNIFTNTYENIAYWTYNYYTWAVRAYLWY\nHLA-B51:06,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWH\nHLA-B51:07,YYATYRNISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:08,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B51:09,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B51:10,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWY\nHLA-B51:100,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:101,YYATYRNIFTNTYENIAYWTYNYYTWAELTYTWH\nHLA-B51:102,YYATFRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:103,YHTTYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:104,YYAMYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:105,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:106,YYATYRNIFTNTYENIAYWTYNLYTWAELAYLWH\nHLA-B51:107,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51
:108,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLSY\nHLA-B51:109,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:111,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:112,YYAKYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:113,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:114,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:115,YYATYRNIFTNTYENIAYWTYNYYTWAALAYLWH\nHLA-B51:116,YHSTYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:117,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:119,YYTTYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:12,YYATYRNIFTNTYENALYWTYNYYTWAELAYLWH\nHLA-B51:12,YYATYRNIFTNTYENIADWTYNYYTWAELAYLWH\nHLA-B51:120,YYATYRNIFTNTYENIAYWTYNYYTWAKLAYLWH\nHLA-B51:121,YYATYRNIFTKTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:122,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B51:123,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:124,YYATYRNIFTNTYENIAYWTCNYYTWAELAYLWH\nHLA-B51:125,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:126,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:127,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:128,YYATYRNIFSNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:129,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:13,YYATYRNIFTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B51:130,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:131,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:132,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:133,YYATYRNIFTNTYENIAYWTYNYHTWAELAYLWH\nHLA-B51:134,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:135,YYATYRNIFTNTYENIAYLRYDYYTWAELAYLWH\nHLA-B51:136,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:137,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:138,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:139,YYATYRNIFTNTYENIAYIRYNYYTWAVLAYLWH\nHLA-B51:14,YYATYRNIFTNTYENIAYWTYKYYTWAELAYLWH\nHLA-B51:140,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:141,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B51:142,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:143,YYATYRNIFTNTYENIAYWTYNYYTWAELAYTWY\nHLA-B51:144,YYATYRNIFTNTYENIAYWTYNYYTWAELAYVWY\nHLA-B51:145,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:146,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B51:147,YYAMYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:148,YYATYRNIFTNTYENIAYLSYNYYTWAVLAYLWY\nHLA-B51:15,YYATYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B51:150,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:151,YHATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:152,YYATYRNIFTNTYENIAYWKYNYYTWAELAYLWH\nHLA-B51:153,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYTWH\nHLA-B51:154,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:155,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:156,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:157,YYATYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B51:158,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:159,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:16,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B51:160,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:161,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:162,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:163,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:164,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:165,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:166,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:167,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:168,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:169,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:17,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:170,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:171,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:172,YYATYRNIFTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B51:174,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:175,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:176,YDSTYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:177,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:179,YYSTYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:18,YYATYR
NIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:180,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B51:181,YYATYRNIFTNTYENIAYWTYNYYTLAALAYLWH\nHLA-B51:182,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:183,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:185,YYATYRNIFTNTYESIAYWTYNYYTWAELAYLWH\nHLA-B51:186,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:187,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:188,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:189,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:19,YYATYRNIFTNTYENIAYWTYNYYTWAVLTYLWH\nHLA-B51:190,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:191,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:192,YYATYRNIFTNTYENIAYWTYNDYTWAELAYLWH\nHLA-B51:193,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:194,YYATYRNIFTNTYENIAYLRYNYYTWAVLAYLWH\nHLA-B51:195,YYATYRNIFTNTYENIAYWTYNYYIWAELAYLWH\nHLA-B51:196,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:197,YYATYRNIFTNTYENIAYWTYDYYTWAERAYTWY\nHLA-B51:198,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:199,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B51:20,YYATYRNIFTNTDENIAYWTYNYYTWAVDAYLWH\nHLA-B51:200,YYATYRNIFTNTYENIAYWTSNYYTWAELAYLWH\nHLA-B51:201,YYATYRNIFTNTDVNIAYWTYNYYTWAELAYLWH\nHLA-B51:202,YYATYRNIFTNTYENIACWTYNYYTWAELAYLWH\nHLA-B51:203,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:204,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:205,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:206,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:207,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:208,YYATYRNIFTNTDENIAYWTYNYYTWAELAYLWH\nHLA-B51:209,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:21,YYATYRNIFTNTYENIAYWTYNYYTWAELAYTWH\nHLA-B51:210,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:211,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:212,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:213,YYATYRNIFTNTYENIAYLTYNYYTWAELAYLWH\nHLA-B51:214,YYATYRNISTNTYENIAYWTYNDYTWAELAYLWH\nHLA-B51:215,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:216,YYATYRNIFTNTYENIAYWTYDSYTWAELAYLWH\nHLA-B51:217,YYATYRNIFTNTYENIAYWTYNYYTWAVRAYLWH\nHLA-B51:218,YYSTYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:219,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:22,YYATYRNICTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:220,YYATYRNIFTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B51:221,YYATYRNIFTNTYENIAYWTYNYCTWAELAYLWH\nHLA-B51:222,YYATYRNISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:223,YYATYRNIFTNTYENIAYWTYNLYTWAELAYLWH\nHLA-B51:224,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:225,YYATYRNIFTNTYENIAYWTYNLYTWAELAYLWH\nHLA-B51:226,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:227,YYATYRNIFTNTYENIAYIRYDYYTWAELAYLWH\nHLA-B51:228,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:229,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:23,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLSY\nHLA-B51:230,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:231,YYATYRDIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:232,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:233,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:234,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:236,YYATYRNILTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:237,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:238,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:239,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:24,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:240,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:241,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:242,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:243,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:244,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:246,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:247,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:248,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:249,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:250,YYATYRNIFTNTYEN
IAYWTYNYYTWAELAYLWH\nHLA-B51:251,YYATYRNISTNTYENIAYWTYNSYTWAELAYLWH\nHLA-B51:252,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:253,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:254,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:255,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:257,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:258,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:259,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B51:26,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:260,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:261,YYATYRNIFTNTYENIAYWTYDDYTWAELAYLGH\nHLA-B51:262,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:263,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B51:265,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:266,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:267,YYAEYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:28,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:29,YYATYRNIFTNTYENIAYWTYNYYTWAERAYLWH\nHLA-B51:30,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:31,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWH\nHLA-B51:32,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:33,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:34,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWY\nHLA-B51:35,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:36,YYATYRNIFTNTYENIAYWTYNYYTWAEDAYTWY\nHLA-B51:37,YYATYRNIFTNTYENIAYWTYDSYTWAELAYLWH\nHLA-B51:38,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:39,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:40,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B51:42,YYATYRNIFTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B51:43,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:45,YYATYRNIFTNTYENIAYLRYDSYTWAELAYLWH\nHLA-B51:46,YYATYRNIFTNTYENIAYITYNYYTWAELAYLWH\nHLA-B51:48,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:49,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:50,YYATYRNIFTNTYENGLYWTYNYYTWAELAYLWH\nHLA-B51:51,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:52,YYATYRNIFTNTHENIAYWTYNYYTWAELAYLWH\nHLA-B51:53,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:54,YYATYRNIFTNTYENTAYWTYNYYTWAVRAYLWY\nHLA-B51:55,YYATYRNIFTNTYENIAYWTYNYYTWAEQAYLWH\nHLA-B51:56,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B51:57,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:58,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:59,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWY\nHLA-B51:60,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:61,YYATYRNIFTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B51:62,YYATYRNIFTNTYENIAYLRYNLYTWAELAYLWH\nHLA-B51:63,YYATYRNIFTNTYENIAYWTYNSYTWAELAYLWH\nHLA-B51:64,YYATYRNIFTNTYENIAYLSYNYYTWAELAYLWH\nHLA-B51:65,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:66,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:67,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:68,YDATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:69,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:70,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:71,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:72,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:73,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B51:74,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:75,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:76,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:77,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:78,YYATYRNIFTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B51:79,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:80,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:81,YYATYRNIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B51:82,YYATYRNIFTNTYENIAYWTYNYYTWAERAYEWH\nHLA-B51:83,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:84,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:85,YYATYRNIFTNTYENIAYWTYHDYTWAELAYLWH\nHLA-B51:86,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:87,YYATYRNIFTNTYENIAYWTYNYYTWADLAYLWH\nHLA-B51:88,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:89,YYATYRNIFTNTYENIAYWTYNYYTWAELA
YLWH\nHLA-B51:90,YYATYRNIFTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B51:91,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:92,YYATYRNIFTNTYENIAYWTYDFYTWAELAYLWH\nHLA-B51:93,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWY\nHLA-B51:94,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:95,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:96,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B51:97,YYATYRNIFTNTYENIAYWTYNSYTWAVDAYLWH\nHLA-B51:99,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5201,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5202,YYAMYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5203,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B5204,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5205,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5206,YYATYREIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5207,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B5208,YYATYREISTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B5209,YYATYREISTNTYESIAYWTYNYYTWAELAYLWH\nHLA-B5210,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B5211,YYATYREISTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B52:01,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:02,YYAMYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:03,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B52:04,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:05,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:06,YYATYREIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:07,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:08,YYATYREISTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B52:09,YYATYREISTNTYESIAYWTYNYYTWAELAYLWH\nHLA-B52:10,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B52:11,YYATYREISTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B52:12,YYATYREISTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B52:13,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:14,YYATYREISTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B52:15,YYATYREISTNTYENIAYWTYNYYTWAELAYLSH\nHLA-B52:16,YYSEYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:17,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:18,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:19,YYATYREISTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B52:20,YYATYREISTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B52:21,YYATYREISTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B52:22,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:23,YYATYREISTDTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:24,YYATYREISTNTYENIAYWTYNYYTWAERAYLWH\nHLA-B52:25,YYATYREISTNTYENIAYWTYNLYTWAELAYLWH\nHLA-B52:26,YYATYREISTNTYENIAYWTYDDYTWAELAYLWH\nHLA-B52:27,YYSTYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:28,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:29,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:30,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:31,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:32,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:33,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:34,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:35,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:36,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:37,YYATYREISTNTYENIAYWTYNYYTWAELAYTWH\nHLA-B52:38,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:39,YYATYREISTNTYENIAYWTYNSYTWAELAYLWH\nHLA-B52:40,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:41,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:42,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:43,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:44,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:45,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:46,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:47,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:48,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:50,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:51,YHATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:52,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:53,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:54,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:55,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:56,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:57,YYATYREISTNTYENIAYWTYNYY
TWAELAYLWH\nHLA-B52:58,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:59,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:60,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:61,YYATYREISTNTYENIAYWTYNYYTWAEVAYLWH\nHLA-B52:62,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:63,YYATYREISTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B52:64,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:65,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:66,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:67,YYATYREISTNTYENIAYWTYNYYTWAELAYEWY\nHLA-B52:68,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:69,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:70,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:71,YYATYREISTNTYENIAYWTYNYYTWAVLTYTWH\nHLA-B52:72,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:73,YYATYRQISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:74,YYATYREISTNTYENIAYWTYNYYTLAVLAYLWH\nHLA-B52:75,YYATYREISTNTYENIAYWTYDFYTWAELAYLWH\nHLA-B52:76,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:77,YDSTYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:78,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:79,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:80,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:81,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:82,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:83,YYTTYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B52:84,YYATYREISTNTYENIAYWTYNYYTWAELTYTWH\nHLA-B5301,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5302,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWH\nHLA-B5303,YYATYRNIFTNTYEDTLYIRYDSYTWAVLAYLWY\nHLA-B5304,YYATYRNIFTNTYENIAYIRYDFYTWAVLAYLWY\nHLA-B5305,YYATYRNIFTNTYESIAYIRYDSYTWAVLAYLWY\nHLA-B5306,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWH\nHLA-B5307,YYATYRNIFTNTYENIAYIRSNFYTWAVLAYLWY\nHLA-B5308,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B5309,YYATYRNISTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B5310,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5311,YYATYRNIFTNTYENTAYIRYDSYTWAVRAYLWY\nHLA-B5312,YYATYRNIFTNTYESTAYIRYDSYTWAVLAYLWY\nHLA-B5313,YYATYRNIFTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B53:01,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:02,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWH\nHLA-B53:03,YYATYRNIFTNTYEDTLYIRYDSYTWAVLAYLWY\nHLA-B53:04,YYATYRNIFTNTYENIAYIRYDFYTWAVLAYLWY\nHLA-B53:05,YYATYRNIFTNTYESIAYIRYDSYTWAVLAYLWY\nHLA-B53:06,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWH\nHLA-B53:07,YYATYRNIFTNTYENIAYIRSNFYTWAVLAYLWY\nHLA-B53:08,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B53:09,YYATYRNISTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B53:10,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:11,YYATYRNIFTNTYENTAYIRYDSYTWAVRAYLWY\nHLA-B53:12,YYATYRNIFTNTYESTAYIRYDSYTWAVLAYLWY\nHLA-B53:13,YYATYRNIFTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B53:14,YYATYRNIFTNTYENIAYLSYDSYTWAVLAYLWY\nHLA-B53:15,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:16,YYATYRNIFTNTYESIAYIRYDSYTWAVRAYLWY\nHLA-B53:17,YYATYREISTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:18,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:19,YYATYRNIFTNTYENIAYIRYNYYTWAVLAYLWY\nHLA-B53:20,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:21,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:22,YYATYRNIFTNTYENIAYIRYDSYTWAVDAYLSY\nHLA-B53:23,YYATYRNIFTNTDENIAYIRYDSYTWAVLAYLWY\nHLA-B53:24,YYATYRNIFTNTYENIAYIRYDSYTWAVRAYLWY\nHLA-B53:25,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:26,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:27,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:28,YYATYREISTNTYENIAYIRYDSYTWAELAYLWH\nHLA-B53:29,YYATYRNIFTNTYENIAYIGYDSYTWAVLAYLWY\nHLA-B53:30,YYAKYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:31,YYATYRNIFTNTYENTAYIRYDFYTWAVLAYLWY\nHLA-B53:32,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:33,YYATYRNIFTNTYESIAYIRYDFYTWAVLAYLWY\nHLA-B53:34,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:35,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:36,YYATYRNIFTNTYENTAYIR
YNYYTWAVLAYLWY\nHLA-B53:37,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:38,YYATYREISTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B53:39,YYAKYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:40,YYATYRNISTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:41,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:42,YYATYRNIFTNTYENIAYIRYDYYTWAVLAYLWY\nHLA-B53:43,YYSTYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:44,YYATYRNIFTNTYENIAYLRYDSYTWAVLAYLWY\nHLA-B53:45,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:46,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:47,YYATYRNIFTNTYENIAYLRYDSYTWAVLAYLWY\nHLA-B53:49,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:50,YYATYRNIFTNTYENIAYIRYNSYTWAVLAYLWY\nHLA-B53:51,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:52,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B53:53,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5401,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5402,YHAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5403,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B5404,YYAGYRNIYAQTDESNLYWTYNYYTWAVLAYTWY\nHLA-B5405,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5406,YYAGYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B5407,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5409,YYAGYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B5410,YYAGYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B5411,YYAGYRNIYAQTDESNLYWTYNYYSWAVLAYTWY\nHLA-B5412,YYAGYRNIYAQTDENIAYWTYNLYTWAVLAYTWY\nHLA-B5413,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:01,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:02,YHAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:03,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B54:04,YYAGYRNIYAQTDESNLYWTYNYYTWAVLAYTWY\nHLA-B54:06,YYAGYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B54:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:09,YYAGYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B54:10,YYAGYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B54:11,YYAGYRNIYAQTDESNLYWTYNYYSWAVLAYTWY\nHLA-B54:12,YYAGYRNIYAQTDENIAYWTYNLYTWAVLAYTWY\nHLA-B54:13,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:14,YYAGYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B54:15,YYAGYRNIYAQTDESNLYWTYDYYTWAVLAYTWY\nHLA-B54:16,YYAGYRNIYAQTDESNLYWTYDLYTWAVLAYTWY\nHLA-B54:17,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:18,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:19,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:20,YYAGYRNIYAQTDESNLYWTYNLYTWAERAYTWY\nHLA-B54:21,YYSGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:22,YYAGYRNIYAQTDESNLYWTYNLYSWAVLAYTWY\nHLA-B54:23,YYAGYRNIYAQTEESNLYWTYNLYTWAVLAYTWY\nHLA-B54:24,YYAGYRNIFAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:25,YYAGYRNIYAETDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:26,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYEWY\nHLA-B54:27,YYAGYRNIYAQTDESNLYWTYNLYAWAVLAYTWY\nHLA-B54:28,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:29,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:30,YYAGYRNIYANTYESNLYWTYNLYTWAVLAYTWY\nHLA-B54:31,YYAGYRNIYAQTDESNLYWTYNLCTWAVLAYTWY\nHLA-B54:32,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:33,YYAGYRNIYAQTDESNLYWTYNLYTWAEWAYLWY\nHLA-B54:34,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:35,YYAGYREIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:36,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:37,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B54:38,YYAGYRNIYAQTDESNLYLWCNLYTWAVLAYTWY\nHLA-B5501,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B5502,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5503,YYAEYRNIYAQTDVSNLYWTYNLYTWAELAYTWY\nHLA-B5504,YYAEYRNIYAQTDESNLYLSYNYYTWAVLAYTWY\nHLA-B5505,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B5507,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5508,YYAEYRNIYAQTDESNLYLRYNYYTWAVLAYLWY\nHLA-B5509,YYAEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B5510,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5511,YYAEYRNIYAQTDESNLYWMYNLYTWAELAYTWY\nHLA-B5512,YYAEYRNIYAQTDENNLYWTYNLYTWAVLAYTWY\nHLA-B5513,YYAEYRNIYAQTDESNLYWTYNFYTWA
VLAYTWY\nHLA-B5514,YYAEYRNIYAQTDESNLYIVYDSYTWAELAYTWY\nHLA-B5515,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B5516,YHAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5517,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B5518,YYAEYREISTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B5519,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5520,YYAEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B5521,YYAEYRNIYAQTDESNLYWTYNLYTWAEWAYTWY\nHLA-B5522,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYEWY\nHLA-B5523,YYAEYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B5524,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYEWY\nHLA-B5525,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B5526,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B5527,YYAEYRNIYAQTDESNLYWTYDYYTWAVLAYTWY\nHLA-B55:01,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:02,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:03,YYAEYRNIYAQTDVSNLYWTYNLYTWAELAYTWY\nHLA-B55:04,YYAEYRNIYAQTDESNLYLSYNYYTWAVLAYTWY\nHLA-B55:05,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:08,YYAEYRNIYAQTDESNLYLRYNYYTWAVLAYLWY\nHLA-B55:09,YYAEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B55:10,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:11,YYAEYRNIYAQTDESNLYWMYNLYTWAELAYTWY\nHLA-B55:12,YYAEYRNIYAQTDENNLYWTYNLYTWAVLAYTWY\nHLA-B55:13,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYTWY\nHLA-B55:14,YYAEYRNIYAQTDESNLYIVYDSYTWAELAYTWY\nHLA-B55:15,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:16,YHAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:17,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B55:18,YYAEYREISTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B55:19,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:20,YYAEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B55:21,YYAEYRNIYAQTDESNLYWTYNLYTWAEWAYTWY\nHLA-B55:22,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYEWY\nHLA-B55:23,YYAEYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B55:24,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYEWY\nHLA-B55:25,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:27,YYAEYRNIYAQTDESNLYWTYDYYTWAVLAYTWY\nHLA-B55:28,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B55:29,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:30,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:31,YYAEYRNIYAQTYESNLYWTYNLYTWAELAYTWY\nHLA-B55:32,YYAEYRNIYAQTDESNLYWTYNSYTWAVLAYTWY\nHLA-B55:33,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:34,YYAEYREISAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:35,YYAMYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:36,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:37,YYAEYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B55:38,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:39,YYAEYRNIYAQTDESNLYWTYNLHTWAVLAYTWY\nHLA-B55:40,YYAEYREIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:41,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:42,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:43,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:44,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:45,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:46,YYAEYRNIYAQTDESNLYWTYDLYTWAVLAYTWY\nHLA-B55:47,YYAEYRNISAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:48,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:49,YYAEYRNIYAQTDVSNLYLSYNYYTWAVLAYTWY\nHLA-B55:50,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:51,YYAEYRNIYAQTDESNLYLSYNYYTWAVLAYLWY\nHLA-B55:52,YYAEYRNIYAQTDESNLYWTYNLYTWAERAYTWY\nHLA-B55:53,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:54,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:56,YYAEYRNIYAQTDESNLYWTYNLYTWAEDAYTWY\nHLA-B55:57,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:58,YYAEYRNIYAQTDESNLYWTYNFYTWAELAYTWY\nHLA-B55:59,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:60,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:61,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:62,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:63,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:64,YYAEYRNIYAQTDESNLYWTYNLY
TWAELAYTWY\nHLA-B55:65,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:66,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:67,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:68,YYAEYRNIYAQTDESNLYWTYDSYTWAELAYTWY\nHLA-B55:69,YYAEYRNIYAQTDVSNLYWTYNLYTWAVLAYTWY\nHLA-B55:70,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:71,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:72,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:73,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:74,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:75,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:76,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:77,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:78,YYAEYRNIYANTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:79,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:80,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:81,YYAEYRNIYAQTDESNLYWTYNYYTWAVLAYTWY\nHLA-B55:82,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:84,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:85,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:86,YYAEYRNIYAQTDESNLYLSYNYYTRAVLAYTWY\nHLA-B55:87,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:88,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B55:90,YYAEYRNIYAQTDEDTLYWTYNLYTWAELAYTWY\nHLA-B55:91,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:92,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:93,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:94,YYAEYRNIYAQTDESNLYWTYNLCTWAELAYTWY\nHLA-B55:95,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B55:96,YYAEYRNIYAQTDESNLYWTYNLYTWVVLAYTWY\nHLA-B5601,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B5602,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B5603,YYAEYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B5604,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B5605,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B5606,YYATYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B5607,YYAEYRNIYAQTDENTAYWTYNLYTWAVLAYLWY\nHLA-B5608,YYAEYREKYGQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B5609,YYAEYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B5610,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYTWY\nHLA-B5611,YYAEYRNIYAQTDESNLYIRYDFYTWAVLAYLWY\nHLA-B5612,YYAEYRNIYAQTDESNLYIRYNYYTWAVLAYTWY\nHLA-B5613,YYAEYRNIYAQTDESNLYWTYNLYTWAVDAYLWY\nHLA-B5614,YYAEYREKYRQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B5615,YYAEYRNIYAQTDESNLYWTYNYYTWAVLAYLWY\nHLA-B5616,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B5617,YYAEYRNIYANTDESNLYWTYNLYTWAVLAYLWY\nHLA-B5618,YYAEYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B5620,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:01,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:02,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B56:03,YYAEYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B56:04,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B56:05,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B56:06,YYATYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B56:07,YYAEYRNIYAQTDENTAYWTYNLYTWAVLAYLWY\nHLA-B56:08,YYAEYREKYGQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:09,YYAEYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B56:10,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYTWY\nHLA-B56:11,YYAEYRNIYAQTDESNLYIRYDFYTWAVLAYLWY\nHLA-B56:12,YYAEYRNIYAQTDESNLYIRYNYYTWAVLAYTWY\nHLA-B56:13,YYAEYRNIYAQTDESNLYWTYNLYTWAVDAYLWY\nHLA-B56:14,YYAEYREKYRQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:15,YYAEYRNIYAQTDESNLYWTYNYYTWAVLAYLWY\nHLA-B56:16,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:17,YYAEYRNIYANTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:18,YYAEYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B56:20,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:21,YYAEYRNIYAQTDENIAYWTYNYYTWAELAYLWH\nHLA-B56:22,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYLWY\nHLA-B56:23,YYAEYRNIYANTYESNLYWTYNLYTWAVLAYTWY\nHLA-B56:24,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:25,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYLWY\nHLA-B56:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:27,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:29,YYAEYRNIYAQTDESNLYWTYNLYTW
AVLAYLWY\nHLA-B56:30,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:31,YYAEYRNIYAQTDESNLYLRYDSYTWAVLAYTWH\nHLA-B56:32,YYAEYRNIYAQTDESNLYLRYDSYTWAELAYTWY\nHLA-B56:33,YYAEYRNICTNTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:34,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWH\nHLA-B56:35,YHAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:36,YYTEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:37,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYLWY\nHLA-B56:39,YYAMYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:40,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:41,YYAEYRNIYAQTDENNLYWTYNLYTWAVLAYLWY\nHLA-B56:42,YYAEYRNIYTNTYESNLYWTYNLYTWAVLAYLWY\nHLA-B56:43,YYAEYRNIYAQTDESNLYWTYNLYTWAVWAYLWY\nHLA-B56:44,YYAEYRNISTNTYESNLYWTYNLYTWAVLAYLWY\nHLA-B56:45,YYATYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:46,YYAEYRNIYAQTDESTLYWTYNLYTWAVLAYLWY\nHLA-B56:47,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:48,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:49,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:50,YYAEYRNIYAQTDESNLYWTYDSYTWAVLAYLWY\nHLA-B56:51,YYAEYRNIYAQTDESNLYLRYNLYTWAVRAYLWY\nHLA-B56:52,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:53,YYAEYRNIYVQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:54,YYAEYRNIYTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B56:55,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:56,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B56:57,YYAEYQNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B56:58,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:59,YYAEYRNIYAQTDESNLYWTYNLYTWAALAYLWY\nHLA-B56:60,YYAEYRNIYAQTDESNLYIRYNLYTWAVLAYEWY\nHLA-B56:61,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:62,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B56:63,YYAEYRNIYAQTDESNLYLRYNLYTWAVRAYLWY\nHLA-B56:64,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B5701,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B5702,YYAMYGENMASTYENIAYIVYNYYTWAVRAYLWY\nHLA-B5703,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B5704,YYAMYGENMASTYENIAYIVYDDYTWAVRAYLWY\nHLA-B5705,YYAMYGENMASTYENIAYIRYNYYTWAVRAYLWY\nHLA-B5706,YYAMYGENMASTYENIAYIVYDSYIWAVLAYLWY\nHLA-B5707,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLSY\nHLA-B5708,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B5709,YYAMYGENMASTYENIAYIVYNYYTWAEDAYLWY\nHLA-B5710,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B5711,YYAMYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B5712,YYAMYGENMASTYESNLYIVYNYYTWAVRAYLWY\nHLA-B5713,YYAMYGENMASTYENIAYIVYDSYTWAERAYEWY\nHLA-B57:01,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:02,YYAMYGENMASTYENIAYIVYNYYTWAVRAYLWY\nHLA-B57:03,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:04,YYAMYGENMASTYENIAYIVYDDYTWAVRAYLWY\nHLA-B57:05,YYAMYGENMASTYENIAYIRYNYYTWAVRAYLWY\nHLA-B57:06,YYAMYGENMASTYENIAYIVYDSYIWAVLAYLWY\nHLA-B57:07,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLSY\nHLA-B57:08,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:09,YYAMYGENMASTYENIAYIVYNYYTWAEDAYLWY\nHLA-B57:10,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:100,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:101,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:102,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:103,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:104,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:105,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:106,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:107,YYAMYGENMASTYENIAYIVYNSYTWAVLAYLWY\nHLA-B57:108,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:109,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:11,YYAMYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B57:110,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:111,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:112,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:113,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:114,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:12,YYAMYGENMASTYESNLYIVYNYYTWAVRAYLWY\nHLA-B57:13,YYAMYGENMASTYENIAYIVYDSYTWAERAYEWY\nHLA-B57:14,YYAMYGE
NMASTYENIAYIVYDSYTWAVLAYLWH\nHLA-B57:15,YYAMYGENVASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:16,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:17,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:18,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:19,YYAMYGENMASTYENIAYIVYDSYTWAVRAYLWY\nHLA-B57:20,YYAMYGKNMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:21,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:22,YYAMYGENMASTYENIAYIVYDSYTWAELAYLWY\nHLA-B57:23,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:24,YYAMYGENMASTYENIAYIVYDSYTWAVDAYLWY\nHLA-B57:25,YYAMYGENMASTYENIAYIVYDSYTWAVLAYEWY\nHLA-B57:26,YYAMYGENMASTYENIAYIVYDSYTWAVLAYTWY\nHLA-B57:27,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:29,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:30,YYAMYGENMASTYENIAYIVYDSYTWAARAYLWY\nHLA-B57:31,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:32,YYAMYGENMASTYENIAYIVYHDYTWAVLAYLWY\nHLA-B57:33,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:34,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:35,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:36,YYAMYGEHMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:37,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:38,YYAMYGENMASTYETIAYIVYDSYTWAVLAYLWY\nHLA-B57:39,YHAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:40,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:41,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:42,YYAMYGENMASTYENIAYIVYNYYTWAVRAYLWY\nHLA-B57:43,YYAMYGENMASTYENIAYIVYDSYTWAVLAYPWY\nHLA-B57:44,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:45,YYAKYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:46,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:47,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:48,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:49,YDAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:50,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:51,YHTKYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:52,YYAMYGENMASTYENIAYIVYDSYTWAVLASLWY\nHLA-B57:53,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:54,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:55,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:56,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:57,YYAMYGENMASTYENIAYIVYNYYTWAELAYLWY\nHLA-B57:58,YYSMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:59,YYAMYGENMASTYENIAYIVYDSYSLAVLAYLWY\nHLA-B57:60,YYAMYGENMASTYESIAYIVYDSYTWAVLAYLWY\nHLA-B57:61,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:62,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:63,YYAMYGENMASTYENIAYIVYNYYTWAERAYLWY\nHLA-B57:64,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:65,YYAMYGENMASTYENIAYIVYDSYTWVVLAYLWY\nHLA-B57:66,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:67,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:68,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:69,YYTKYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:70,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLCY\nHLA-B57:71,YYTMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:72,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:73,YHAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:74,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:75,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:76,YYAMYGENMASTYENIAYIVYDFYTWAVLAYLWY\nHLA-B57:77,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:78,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:80,YYSMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:81,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:82,YYAMYGENMASTYENIAYLRYNYYTWAVLAYLWY\nHLA-B57:83,YYAMYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B57:84,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:85,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:86,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:87,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:88,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:89,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:90,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:91,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nH
LA-B57:92,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:93,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLSY\nHLA-B57:94,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:95,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:96,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B57:97,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B57:99,YYAMYGENMASTYENIAYTVYDSYTWAVLAYLWY\nHLA-B5801,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5802,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B5804,YYATYEENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5805,YYATYGENMASTYENIAYIRYDSYTLAALAYTWY\nHLA-B5806,YYATYGENMASTYENIAYLWYDSYTWAELAYLWY\nHLA-B5807,YYATYGENMASTYENIAYLWYDSYTWAVLAYLSY\nHLA-B5808,YYATYGENMASTYENIAYWTYNYYTWAELAYLWH\nHLA-B5809,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWH\nHLA-B5811,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5812,YYSTYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5813,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5814,YYATYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B5815,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:01,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:02,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B58:04,YYATYEENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:05,YYATYGENMASTYENIAYIRYDSYTLAALAYTWY\nHLA-B58:06,YYATYGENMASTYENIAYLWYDSYTWAELAYLWY\nHLA-B58:07,YYATYGENMASTYENIAYLWYDSYTWAVLAYLSY\nHLA-B58:08,YYATYGENMASTYENIAYWTYNYYTWAELAYLWH\nHLA-B58:09,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWH\nHLA-B58:100,YYATYRENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:11,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:12,YYSTYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:13,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:14,YYATYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B58:15,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:16,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B58:18,YYATYGENMASTYENIAYLSYDSYTWAVLAYLWY\nHLA-B58:19,YYATYGENMASTYENIAYIRYDSYTWAELAYLWY\nHLA-B58:20,YYATYGENMASTYENIAYLRYNFYTWAVLTYTWY\nHLA-B58:21,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:22,YYATYGENMASTYENIAYIRYDSYTWAVRAYLWY\nHLA-B58:23,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:24,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:25,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B58:26,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B58:27,YYATYGENMASTYENIAYLSYNYYTWAVLAYEWY\nHLA-B58:28,YYATYGENMASTYENIAYIRYNYYTWAVLAYLWY\nHLA-B58:29,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:30,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:32,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:33,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:34,YYATYGENMASTYENIAYIRYDFYTWAVLAYLWY\nHLA-B58:35,YYATYGENMASTYENIAYIRYDSYTWAVLAYTWY\nHLA-B58:36,YYAMYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:37,YYATYGENMASTYENIAYIRYDSYTWAVLAYEWY\nHLA-B58:38,YYATYGENMASTYENIAYLWYDSYTWGVLAYLWY\nHLA-B58:40,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:41,YHATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:42,YYATYGENMASTYENIAFIRYDSYTWAVLAYLWY\nHLA-B58:43,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B58:44,YYAEYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:45,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:46,YYATYGENMASTYENIAYLWYDSYTWAELAYLWY\nHLA-B58:47,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:48,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:49,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:50,YYATYGESMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:51,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:52,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:53,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:54,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:55,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:56,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:57,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:58,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:59,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:60,YYATYGENMASTYENIAYLWYDSYTWAVLAYL
WY\nHLA-B58:61,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:62,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:63,YYATFGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:64,YYATYGENMASTYESNLYIRYDSYTWAVLAYLWY\nHLA-B58:65,YYATYGENMASTYENIAYIRYNYYTWAVLAYLWY\nHLA-B58:66,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:67,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:68,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:69,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:70,YYATYGENMASTYENIACIRYDSYTWAVLAYLWY\nHLA-B58:71,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:73,YYATYGENMASTYENIAYIRYNLYTWAVLAYLWY\nHLA-B58:74,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:75,YYATYGENMASTYEKIAYIRYDSYTWAVLAYLWY\nHLA-B58:76,YYATYGENMASTYENIAYIRYDSYTWAVLAYTWH\nHLA-B58:77,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:78,YYATYGENMASTYENIAYIRYDSYTWAEWAYLWY\nHLA-B58:79,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:80,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:81,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:82,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:83,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:84,YYATYGENMASTYENIAYIRYDSYTWAVLTYTWY\nHLA-B58:85,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:86,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:87,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:88,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:89,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:90,YYATCGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:91,YYATYGENMASTYENIAYIRYDSYTWAELAYLWY\nHLA-B58:92,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:95,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:96,YYATYGENMASTYDNIAYIRYDSYTWAVLAYLWY\nHLA-B58:97,YYATYGENLASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:98,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B58:99,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B5901,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B5902,YYAEYRNIFTNTYENIAYWTYNYYTWAVLAYTWY\nHLA-B59:01,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B59:02,YYAEYRNIFTNTYENIAYWTYNYYTWAVLAYTWY\nHLA-B59:03,YYAEYRNIFTNTYENIAYWTYNFYTWAVLAYTWY\nHLA-B59:04,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B59:05,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B59:06,YYAGYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B59:07,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B59:08,YYAEYRNICTNTYENTAYWTYNLYTWAVLAYTWY\nHLA-B59:09,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYEWY\nHLA-B6701,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B6702,YYSGYREKYRQADVSNLYLRYNFYTWAVLTYTWY\nHLA-B67:01,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B67:02,YYSGYREKYRQADVSNLYLRYNFYTWAVLTYTWY\nHLA-B67:03,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B67:04,YYSEYWNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B67:05,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B67:06,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWH\nHLA-B67:07,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B7301,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B73:01,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B73:02,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B7801,YYATYRNIFTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B7802,YYATYRNIFTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B7803,YYATYRNICTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B7804,YYATYRNIFTNTYESNLYWTYNYYTWAVLAYLWY\nHLA-B7805,YYATYREISTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B78:01,YYATYRNIFTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B78:02,YYATYRNIFTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B78:03,YYATYRNICTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B78:04,YYATYRNIFTNTYESNLYWTYNYYTWAVLAYLWY\nHLA-B78:05,YYATYREISTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B78:06,YYATYREISTNTYENNLYWTYNYYTWAELAYLWH\nHLA-B78:07,YYATYRNIFTNTDESNLYWTYNYYTWAELAYTWH\nHLA-B78:08,YYATYRNIFTNTDESNLYWTYNYYTWAVLAYEWH\nHLA-B78:09,YYATYRNIFTNTYENNLYWTYNYYTWAELAYLWH\nHLA-B78:10,YYATYRNIFTNTYENNLYWTYNYYTWAVLAYLWH\nHLA-B8101,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B8102,YYSEYRNIYAQTDESNLYLSYNYYSLA
VLAYEWY\nHLA-B81:01,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B81:02,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B81:03,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B81:05,YYSEYRNIFAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B81:06,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B81:07,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B81:08,YYSEYRNIYAQTYESNLYLSYNYYSLAVLAYEWY\nHLA-B8201,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B8202,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B82:01,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B82:02,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B82:03,YYSEYRNIYAQTDESNLYLRYNLYTWAVDAYLSY\nHLA-B8301,YYSEYRNIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-B83:01,YYSEYRNIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-B9501,YYAMYREIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B9502,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9503,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B9504,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9505,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9506,YYAKYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B9507,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B9508,YYSEYRNICTNTYESNLYLRYDSYTWAELTYLWY\nHLA-B9509,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B9510,YYAMYREISTNTYESNLYLRCDSYTWAEWAYLWY\nHLA-B9512,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B9513,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9514,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B9515,YYSEYRNICTNTYESTAYLRYDSYTWAELAYLWY\nHLA-B9516,YYAMYREISTNTYESNLYLRYDSYSLAEWAYLWY\nHLA-B9517,YYAMYREISTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B9518,YYAMYREISTNTYESNLYLMYDSYTWAEWAYLWY\nHLA-B9519,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B9520,YYAMYRDISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9521,YYAMYRNISTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B9522,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9523,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B9524,YYSEYRNICTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B9525,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9526,YYAMYREISTNTYESNLYLSYDSCTWAEWAYLWY\nHLA-B9527,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B9528,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B9529,YYAMYREISTNTYESNLYLNYDSYTWAEWAYLWY\nHLA-B9530,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B9532,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-C0102,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C0103,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C0104,YFSGYREKYRQTDVSNLYLWCDSYTWAEWAYTWY\nHLA-C0105,YFSGYREKYRQTDVSNLYLRSDYYTWAERAYTWY\nHLA-C0106,YFSGYREKYRQTDVSNLYLWCDYYTWAVRAYTWY\nHLA-C0107,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C0108,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C0109,YFSGYREKYRQTDVSNLYLWCDYYTWAEWAYTWY\nHLA-C0110,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYEWY\nHLA-C0111,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C0112,YFSGYREKYRQTDVSNLYLWYDYYTWAERAYTWY\nHLA-C0113,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:02,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:03,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C01:04,YFSGYREKYRQTDVSNLYLWCDSYTWAEWAYTWY\nHLA-C01:05,YFSGYREKYRQTDVSNLYLRSDYYTWAERAYTWY\nHLA-C01:06,YFSGYREKYRQTDVSNLYLWCDYYTWAVRAYTWY\nHLA-C01:07,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:08,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:09,YFSGYREKYRQTDVSNLYLWCDYYTWAEWAYTWY\nHLA-C01:10,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYEWY\nHLA-C01:100,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:101,YFSGYREKYRQTDVSNLYLWYDYYTWAERAYTWY\nHLA-C01:102,YFSGYREKYRQTDVSNLYLWYDSYTWAERAYTWY\nHLA-C01:103,YFSGYREKYRQTDVSNLYLWCDHYTWAERAYTWY\nHLA-C01:104,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:105,YFSGYREKYRQTDVSNLYLWCDYYTGAERAYTWY\nHLA-C01:106,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:107,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYLWY\nHLA-C01:108,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:11,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:110,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:1
12,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:113,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:114,YFSGYREKYRQTDVSNLYLWFDYYTWAERAYTWY\nHLA-C01:115,YFSGYREKYRQTDVSNLYLWCDDYTWAERAYTWY\nHLA-C01:116,YFSGYREKYRQTDVSNLYLWCNYYTWAERAYTWY\nHLA-C01:118,YFSGYRENYRQTDVNNLYLWCDYYTWAERAYTWY\nHLA-C01:119,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:12,YFSGYREKYRQTDVSNLYLWYDYYTWAERAYTWY\nHLA-C01:120,YFSGYREKYRQTDVSNLYLRCDYYTWAERAYTWY\nHLA-C01:122,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:123,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:124,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:125,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:126,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:127,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:128,YFSGYRENYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C01:129,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:13,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:130,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:131,YFSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C01:132,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:133,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:134,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:135,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:136,YFSGYREKYRQTDVSNLYLWCDLYTWAERAYTWY\nHLA-C01:138,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:139,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:14,YFSGYREKYRQTDVNKLYLWCDYYTWAERAYTWY\nHLA-C01:140,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYAWY\nHLA-C01:141,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:142,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:144,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:146,YFSGYREKYRQTDVSNLYLWCDFYTWAERAYTWY\nHLA-C01:147,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:148,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:149,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:15,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:150,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:151,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:152,YFSGYREKYRQADVSNLYLWCDSYTWAERAYTWY\nHLA-C01:153,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:154,YFSGYREKYRQTDVSKLYLWCDYYTWAERAYTWY\nHLA-C01:155,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:156,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:157,YFSGYREKYRQADESNLYLWCDYYTWAERAYTWY\nHLA-C01:158,YFSGYREKYRQTDVSNLYFWCDYYTWAERAYTWY\nHLA-C01:159,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:16,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:160,YFSGYREKYRQTDVSNLYLWCDYYTWAALAYTWY\nHLA-C01:161,YFSGCREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:162,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:163,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:164,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:165,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:166,YFSGYWEKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:167,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:168,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:169,YYSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:17,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C01:170,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:172,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:173,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:174,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:175,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:176,YFSGYREKYRQTDVSNLYIWCDYYTWAERAYTWY\nHLA-C01:18,YFSGYREKYHQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:19,YFSGYREKYRQTDVCNLYLWCDYYTWAERAYTWY\nHLA-C01:20,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:21,YFSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C01:22,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYTWY\nHLA-C01:23,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C01:24,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C01:25,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:26,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:27,YFSGYREKYRQTDVSN
LYLWCDYYTWAERAYTWY\nHLA-C01:28,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:29,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTCY\nHLA-C01:30,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWH\nHLA-C01:31,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYMWY\nHLA-C01:32,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:33,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:34,YFSGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C01:35,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYLWY\nHLA-C01:36,YFSGYREKYRQTDVSNLYLRFDYYTWAERAYTWY\nHLA-C01:38,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:39,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:40,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:41,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:42,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:43,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:44,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:45,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:46,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:47,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:48,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:49,YFSGYREKYRQTDVSNLYIRYDYYTWAERAYTWY\nHLA-C01:50,YFSGYREKYRQTDVSNLYIRYDYYTWAERAYTWY\nHLA-C01:51,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:52,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:53,YFSGYREKYRQTDVSNLYLWCDYCTWAERAYTWY\nHLA-C01:54,YFSGYREKYRQTDVSNLYLWCDSYTWAERAYTWY\nHLA-C01:55,YFSGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C01:57,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:58,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:59,YFSGYREKYRQTDVNNLYLWCDYYTWAERAYTWY\nHLA-C01:60,YFSGYREKYRQTDVSNLYLWCDYYTLAERAYTWY\nHLA-C01:61,YFSGYREKYRQTDVRNLYLWCDYYTWAERAYTWY\nHLA-C01:62,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:63,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:64,YFAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:65,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:66,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:67,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:68,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:70,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:71,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:72,YFSGYREKYRRTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:73,YFSMYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:74,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:75,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:76,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:77,YFSGYREKYRQTDVSNLYLWSDYYTWAERAYTWY\nHLA-C01:78,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C01:79,YFSGYREKYRQTDVSNLYLRYDYYTWAERAYTWY\nHLA-C01:80,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:81,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:82,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:83,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:84,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:85,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:87,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:88,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:90,YFSGYREKYRQTDVSNLYLWCNLYTWAERAYTWY\nHLA-C01:91,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:92,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:93,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:94,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:95,YFSGYREKYRQTDESNLYLWCDYYTWAERAYTWY\nHLA-C01:96,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C01:97,YFSGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C01:99,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C0202,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C0203,YYAGYREKYRQTDVNKLYLRYDSYTWAVLAYEWY\nHLA-C0204,CYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C0205,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYEWY\nHLA-C0206,YYAGYREKYRQTDVNKLYLRYDLYTWAEWAYEWY\nHLA-C0207,YYAGYREKYRQTDVNKLYLRYHDYTWAEWAYEWY\nHLA-C0208,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C0209,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C0210,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C0211,YYAGYREKY
RQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C0212,YYAGYREKYRQADVSKLYLRYDSYTWAEWAYEWY\nHLA-C0213,YYAGYREKYRQTDVNKLYLRYDSYTWAAWAYEWY\nHLA-C0214,YDAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:02,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:03,YYAGYREKYRQTDVNKLYLRYDSYTWAVLAYEWY\nHLA-C02:04,CYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYEWY\nHLA-C02:06,YYAGYREKYRQTDVNKLYLRYDLYTWAEWAYEWY\nHLA-C02:07,YYAGYREKYRQTDVNKLYLRYHDYTWAEWAYEWY\nHLA-C02:08,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:09,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:10,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:100,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:101,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:102,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:103,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:104,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:106,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:107,YDSGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:108,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:109,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:11,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:110,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:111,YYAGYREKYRQTDVNKLYLRYDFYTWAEWAYEWY\nHLA-C02:112,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:113,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:114,YYAGYREKYRQTDVNKLCLRYDSYTWAEWAYEWY\nHLA-C02:115,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYEWY\nHLA-C02:116,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:117,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:118,YYAGYREKYRQTDVNKLYLRYNSYTWAEWAYEWY\nHLA-C02:119,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:12,YYAGYREKYRQADVSKLYLRYDSYTWAEWAYEWY\nHLA-C02:120,YYAGYWEKYRQTDVNKLHLRYDSYTWAEWAYEWY\nHLA-C02:122,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:123,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:124,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:125,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:126,YYAGYREKYRQTDVSKLYLRYDSYTWAEWAYEWY\nHLA-C02:127,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:128,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:129,YYAGYREKYRQTDVTKLYLRYDSYTWAEWAYEWY\nHLA-C02:13,YYAGYREKYRQTDVNKLYLRYDSYTWAAWAYEWY\nHLA-C02:130,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:131,YYAGYREKYRQTDESNLYLRYDSYTWAEWAYEWY\nHLA-C02:132,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:133,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:134,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:136,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:137,YYAGYREKYRQTDVNKLYLRYDSYTWAELAYEWY\nHLA-C02:138,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:139,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:14,YDAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:140,YYAGYREKYRQTDVNKLYLKYDSYTWAEWAYEWY\nHLA-C02:141,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWC\nHLA-C02:142,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWH\nHLA-C02:143,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:144,YYAGYPEKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:145,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:146,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:147,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:148,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:149,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:15,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:151,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:152,YYSGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:153,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:154,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:155,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:156,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:157,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:158,YYAGYREKYRQTDVNKLYLRYHDYTWAEWAYEWY\nHLA-C02:159,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:16,YYAGYREKYRQTDVNKLYLRYDSYTWAELAYEW
Y\nHLA-C02:160,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:161,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:162,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:163,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:164,YDAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:166,YYAGYREKYHQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:17,YYAGYREKYRQTDVNKLYLWFDSYTWAEWAYEWY\nHLA-C02:18,YYAGYREKYRQTDVNKLYLRYDSYTWAALAYEWY\nHLA-C02:19,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C02:20,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:22,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYEWY\nHLA-C02:23,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C02:24,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:26,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:27,YYAGYREKYRQTDVSNLYLRYDSYTWAEWAYEWY\nHLA-C02:28,YYAGYREKYRQTDVNKLYLRYDSYTWAVWAYEWY\nHLA-C02:29,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:30,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:31,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:32,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYEWY\nHLA-C02:33,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:34,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:35,YYAGYREKYRQTDVNKLHLRYDSYTWAEWAYEWY\nHLA-C02:36,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:37,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:39,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:40,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:42,YYSGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:43,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYTWY\nHLA-C02:44,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:45,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:46,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:47,YYAGYREKYRQTDVNKLYLRYDLYTWAEWAYEWY\nHLA-C02:48,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:49,YYAGYREKYRQADVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:50,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:51,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:53,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:54,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:55,YYAGYREKYRQADVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:56,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:57,YYAGYREKYRQTDVNKLYFRYDSYTWAEWAYEWY\nHLA-C02:58,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYLWY\nHLA-C02:59,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:60,YYAGYREKYRQTDVNKLYLSYDSYTWAEWAYEWY\nHLA-C02:61,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:62,YYAGYREKYRQTDVNKLYIRYDSYTWAEWAYEWY\nHLA-C02:63,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:64,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:65,YYAGYREKYRQTDVNNLYLRYDSYTWAEWAYEWY\nHLA-C02:66,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:68,YYAGYREKYRQTDVNKLYLRYNSYTWAEWAYEWY\nHLA-C02:69,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:70,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:71,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:72,YYAGYREKYRQTDVNKLYLRYDSYTWAEWVYEWY\nHLA-C02:73,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:74,YYAGYREKYRQTDVNKLYVRYDSYTWAEWAYEWY\nHLA-C02:75,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:76,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:77,YYAGYREKYRQTDVNKLYLRSDSYTWAEWAYEWY\nHLA-C02:78,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:79,YYAGYGEKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:80,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:81,YYAGYREKYRQTDVNKLYRRYDSYTWAEWAYEWY\nHLA-C02:82,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYESY\nHLA-C02:83,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:84,YYAGYREKYRRTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:85,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:86,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:87,YYAGYREKYRQTDVSNLYLRYDSYTWAEWAYEWY\nHLA-C02:88,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:89,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:90,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:91,YYAGYREKYR
QTDVNKLYLRYGSYTWAEWAYEWY\nHLA-C02:93,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:94,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:95,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:96,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:97,YYAGYREKYRQTDVNKLYLRYDDYTWAEWAYEWY\nHLA-C02:98,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C02:99,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C0301,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C0302,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C0303,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0304,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0305,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C0306,YYAGYREKYRQTDVSNLYIRYVYYTWAELAYLWY\nHLA-C0307,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C0308,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0309,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0310,YYAGYREKYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C0311,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0312,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C0313,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C0314,YYAGYREKYRQTDVSNLYIRYDSYTLAALAYTWY\nHLA-C0315,YYAGYREKYRQADVNKLYLRYDSYTWAELAYLWY\nHLA-C0316,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYTWY\nHLA-C0317,YYAGYREKYRQTDVSNLYLWYDYYTWAELAYLWY\nHLA-C0318,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0319,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C0321,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C0322,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0323,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0324,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0325,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C03:01,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:02,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:03,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:04,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:05,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C03:06,YYAGYREKYRQTDVSNLYIRYVYYTWAELAYLWY\nHLA-C03:07,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C03:08,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:09,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:10,YYAGYREKYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C03:100,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:101,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:102,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C03:103,YYAGYREKYRQTDVSNLYIRSDYYTWAELAYLWY\nHLA-C03:104,YYAGYPEKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:105,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:106,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:107,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:108,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:109,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:11,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:110,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:111,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:112,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:113,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C03:114,YYAGYREKYRQTDVSNLYIRYGYYTWAELAYLWY\nHLA-C03:115,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:116,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:117,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYTWY\nHLA-C03:118,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:119,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C03:12,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C03:120,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:122,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:123,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:124,YYAGYWEKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:125,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:126,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:127,YYAGYREKYRQTDESNLYIRYDYYTWAELAYLWY\nHLA-C03:128,YYAGYREKYRQTDVSNLYIRCDYYTWAELAYLWY\nHLA-C03:129,YYAGYREKYRQTDVSNLYIRYDYYSWAELAYLWY\nHLA-C03:13,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C03:130,YYAGYREKYRQADVNNLYIRYDYYTWAELAYLWY\nHLA-C03:131,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLW
Y\nHLA-C03:132,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:133,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:134,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:135,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C03:136,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C03:137,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:138,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:139,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:14,YYAGYREKYRQTDVSNLYIRYDSYTLAALAYTWY\nHLA-C03:140,YYAGYREKYRQTDVNNLYIRYDYYTWAELAYLWY\nHLA-C03:141,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:142,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYEWY\nHLA-C03:143,YYAGYREKYRQTDVSNLYISYDYYTWAELAYLWY\nHLA-C03:144,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:145,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:146,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:147,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:148,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:149,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:15,YYAGYREKYRQADVNKLYLRYDSYTWAELAYLWY\nHLA-C03:150,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:151,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:152,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:153,YYAGYREKYRQTDVSNLYIRHDYYTWAELAYLWY\nHLA-C03:154,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:155,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:156,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:157,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:158,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:159,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:16,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYTWY\nHLA-C03:160,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:161,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWH\nHLA-C03:162,YYAGYREKYRQTDVSNLYIRYDYYTLAALAYLWY\nHLA-C03:163,YYAGYREKYRQADVNKLYIRYDYYTWAELAYLWY\nHLA-C03:164,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:165,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:166,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:167,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C03:168,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:17,YYAGYREKYRQTDVSNLYLWYDYYTWAELAYLWY\nHLA-C03:170,YCAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:171,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:172,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:173,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:174,YYAGYQEKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:175,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C03:176,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:177,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C03:178,YYAGYREKYRQTDVSNLYLRSDYYTWAELAYLWY\nHLA-C03:179,YYAGYREKYRQTYESNLYIRYDYYTWAELAYLWY\nHLA-C03:18,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:180,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:181,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:182,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:183,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:184,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:185,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:186,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:187,YYAGYREKYRQTDVSNLYIRYDYYTWAVLAYLWY\nHLA-C03:188,YYAGYREKYLQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:19,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C03:190,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:191,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:192,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:193,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:194,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:195,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:196,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:197,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:198,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:199,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:200,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:202,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C0
3:203,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:204,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:205,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:206,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:207,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:209,YYAGYREKYRQTDVSNLYIWCDYYTWAELAYLWY\nHLA-C03:21,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C03:210,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:211,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:212,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:213,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:214,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:215,YYAGYREKYRQTDVSNLYIRYDYYTWAELTYLWY\nHLA-C03:216,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:217,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:218,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:219,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:220,YYAGYREKYRQTDVSNLYIRYDYYTLAELAYLWY\nHLA-C03:221,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:222,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:223,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:225,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:226,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:227,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:228,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:23,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:230,YYAGYREKYRQTDVSNLYIRYDYYTWATLAYLWY\nHLA-C03:231,YSAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:232,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:233,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:234,YYAGYREKYRQTDVSNLYIRYDYYSWATLAYLWY\nHLA-C03:235,YYAGYREKYRQTDVSNLYIRYDYYTWTELAYLWY\nHLA-C03:236,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:237,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:238,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:239,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:24,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:240,YYAGYREKYRQTDVSNLYIRCDSYTWAELAYLWY\nHLA-C03:241,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:242,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:243,YYAGYREKYRQTDVNNLYIRYDYYTWAELAYLWY\nHLA-C03:245,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:246,YYAGYRENYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C03:247,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:248,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:249,YYAGYREKYRQTDVSNLYLWYDYYTWAELAYLWY\nHLA-C03:25,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C03:250,YYAGYREKYRQTDVSNLYIRYNFYTWAELAYLWY\nHLA-C03:251,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYTWY\nHLA-C03:252,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:253,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:254,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:255,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:256,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:257,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:258,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:259,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:26,YYAGYREKYRQTDVSNLYIRYDFYTWAELAYLWY\nHLA-C03:260,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:261,YYAGYREKYRQTDVSNLYIRYDYYTLAALAYEWY\nHLA-C03:262,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:263,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:264,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:266,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:267,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYLWY\nHLA-C03:268,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C03:269,YYAGYREKYRQTDVSNLYIRYDYYTCAELAYLWY\nHLA-C03:27,YYAGYREKYRQADVSNLYLSYDYYTWAELAYLWY\nHLA-C03:270,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:271,YYAGYREKYRQTDVSNLYLWFDSYTWAELAYLWY\nHLA-C03:272,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYEWY\nHLA-C03:273,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:274,YYAGYREKYRQADESNLYIRYDYYTWAELAYLWY\nHLA-C03:275,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:276,YYA
GYREKYRQTDVSNLYIRFDYYTWAELAYLWY\nHLA-C03:278,YHAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:279,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C03:28,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C03:280,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:281,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:282,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:283,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:284,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:285,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:286,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:287,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYLWY\nHLA-C03:288,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:289,YYAGYREKNRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:29,YYAGYRENYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C03:290,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:291,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:292,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C03:293,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYTWY\nHLA-C03:294,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:295,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:296,YYAGYREKYRQTDVSNLYLSYDYYTWAVLAYLWY\nHLA-C03:297,YYAGYREKYRQADVNKLYIRYDYYTWAELAYLWY\nHLA-C03:298,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:299,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:30,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:300,YYAGYREKYRQTDVSNLYLRSDSYTWAELAYLWY\nHLA-C03:301,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:302,YYAGYREKYRQTDVSNLYLWCDYYTWAELAYLWY\nHLA-C03:303,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:304,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:305,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:306,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:307,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C03:308,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:309,YYAGYREKYRQTDVSNLYIRYDYYTWAALAYLWY\nHLA-C03:31,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:310,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:311,YYAGYREKYRQTDGSNLYIRYDYYTWAELAYLWY\nHLA-C03:312,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:313,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:314,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:315,YYAGYREKYRQTDVSNLYLRYDSYTWAELDYLWY\nHLA-C03:317,YYAGYREKYRQTDVSNLYIRYNYYTWAELAYLWY\nHLA-C03:319,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:32,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:320,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:321,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:322,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:324,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:325,YYAGYREKYRQTDVSNLYIRYDYYTWAALAYLWY\nHLA-C03:326,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C03:327,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:328,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:329,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:33,YYAGYREKYRQTDVSNLCLRYDSYTWAELAYLWY\nHLA-C03:330,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:331,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:332,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:333,YYAGYREKYRRTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:334,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:335,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C03:336,YYAGYREKYRQTDVSNLYIRYEYYTWAELAYLWY\nHLA-C03:337,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:338,YYAGYREKYRQTDVSNLYLRFDSYTWAELAYLWY\nHLA-C03:339,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:34,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYEWY\nHLA-C03:340,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:341,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:342,YYAEYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:343,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:344,YDSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:345,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:346,YYAGYREKYRQTDVSNLYISYDYYTWAELAYLWY\nHLA-C03:347,YYAGYREKYRQTD
VSNLYIRYDYYTWAELAYLWY\nHLA-C03:348,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:349,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:35,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C03:350,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:351,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:352,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:353,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:354,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:355,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:356,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:357,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:358,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:359,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:36,YYAGYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-C03:360,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:361,YYAGYREKYRQTDVSNLYIRYDSYTLAALAYTWY\nHLA-C03:362,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:364,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:365,YYAGYQEKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:367,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:368,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:369,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:37,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:370,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:371,YYAGYREKYRQTEVSNLYLRYDSYTWAELAYLWY\nHLA-C03:372,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:373,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:374,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:375,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:376,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:378,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:379,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:38,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C03:381,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:382,YYAGYREKYRQTDVSNLYIRYNYYTWAELAYLWY\nHLA-C03:383,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:384,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYEWH\nHLA-C03:385,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:386,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYEWY\nHLA-C03:387,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:388,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:389,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:39,YDSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:390,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:393,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:394,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:395,YYAGYREKYRQTDVSNLHIRYDYYTWAELAYLWY\nHLA-C03:397,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:398,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:399,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:40,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C03:400,YYAGYREKYRETDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:401,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:402,YYAGYREKYRQTDVSNLYLRCDSYTWAELAYLWY\nHLA-C03:403,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:404,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:405,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:406,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:407,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C03:408,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:409,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:41,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:410,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:411,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:412,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:413,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYLWY\nHLA-C03:414,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:415,YYAGYREKYRQTDVSNLHIRYDYYTWAELAYLWY\nHLA-C03:416,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:417,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:418,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:419,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:42,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C03:420,YYAGYREKYRQTDVSNLYIRYDYY
TWAELAYLWY\nHLA-C03:422,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:423,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:425,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:426,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:427,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:428,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:429,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:43,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C03:430,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:431,YYAGYREKYRQADVSNLYLRYDSYTWAELAYLWY\nHLA-C03:433,YYAGYREKYRQTDVSNLYIRYDDYTWAELAYLWY\nHLA-C03:434,YYAGYREKYRQTDVSNLYIRYDYYTLAELAYLWY\nHLA-C03:435,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:436,YYVGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:437,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:438,YYAGYREKYRQTDVSNLCIRYDYYTWAELAYLWY\nHLA-C03:439,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:44,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:440,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:441,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:443,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:45,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C03:450,YYAGYREKYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C03:451,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:452,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:453,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:454,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:455,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:456,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:457,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:458,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:459,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:46,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:460,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:47,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:48,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:49,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C03:50,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:51,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWH\nHLA-C03:52,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:53,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:54,YYAGYREKYRQTDVSNLYIRYDYYTWAELPYLWY\nHLA-C03:55,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C03:56,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:57,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:58,YYAGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C03:59,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:60,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:61,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:62,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:63,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:64,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:65,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:66,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:67,YDAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:68,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:69,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C03:70,YYAGYREKYRQTDESNLYIRYDYYTWAELAYLWY\nHLA-C03:71,YYAGYREKYRQTDVSNLYLWYDSYTWAELAYLWY\nHLA-C03:72,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:73,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:74,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:75,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:76,HYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:77,YYAGYREKYRQTDVSNLYIRYDYYTWAVLAYLWY\nHLA-C03:78,YYAGYREKYRQTDVSNLYIRYDYYTWAEMAYLWY\nHLA-C03:79,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:80,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C03:81,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:82,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:83,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:84,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:85,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:86,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C03:87,YYAGYREKYRQTDVSNLYIRYDYY
TWAELAYLWY\nHLA-C03:88,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:89,YYAGYREKYRQTDVSNLYLRFDSYTWAELAYLWY\nHLA-C03:90,YYAGYREKYRQTDVSNLYIRSDYYTWAELAYLWY\nHLA-C03:91,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:92,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C03:93,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:94,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C03:95,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C03:96,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C03:97,YYAGYREKYRQTDVSNLYFRYDYYTWAELAYLWY\nHLA-C03:98,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYPWY\nHLA-C03:99,YYSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C0401,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0403,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0404,YSAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C0405,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0406,YYAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C0407,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0408,YSAGYREKYRQADVNKLYLRFNFYTWAERAYLWY\nHLA-C0409,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0410,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C0411,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C0412,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0413,YSAGYREKYRQADVNKLYLRFNFYTWAALAYTWY\nHLA-C0414,YSAGYREKYRQADVNKLYLRFNFYTWAEQAYTWY\nHLA-C0415,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C0416,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0417,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C0418,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:01,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:03,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:04,YSAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C04:05,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:06,YYAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C04:07,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:08,YSAGYREKYRQADVNKLYLRFNFYTWAERAYLWY\nHLA-C04:10,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C04:100,YSAGYREKYRQADVNKLYIRYDFYTWAERAYTWY\nHLA-C04:101,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:102,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:103,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:104,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:106,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:107,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:108,YSAGYREKYRQADVNKLYLRFDFYTWAERAYTWY\nHLA-C04:109,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:11,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C04:110,YSAGYREKYRQADVNKLYIRFNFYTWAERAYTWY\nHLA-C04:111,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:112,YSAGYRENYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:113,YSAGYREKYRQADVNKLYLRFNFYTWAVRAYTWY\nHLA-C04:114,YSAGYREKYRQADVNNLYLRFNFYTWAERAYTWY\nHLA-C04:116,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:117,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:118,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:119,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:12,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:120,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:121,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:122,YSAGYREKYRQADVNKLYLRFNFYTWAVLAYTWY\nHLA-C04:124,YSAGYRQKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:125,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:126,YSAGYREKYRQADVNKLYLRFNFYIWAERAYTWY\nHLA-C04:127,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:128,YSAGCREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:129,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:13,YSAGYREKYRQADVNKLYLRFNFYTWAALAYTWY\nHLA-C04:130,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:131,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:132,YSAGYREKYLQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:133,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:134,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:135,YSAGYREKYRQADVNKLYLRFNYYTWAERAYTWY\nHLA-C04:136,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:137,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\n
HLA-C04:138,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:139,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:14,YSAGYREKYRQADVNKLYLRFNFYTWAEQAYTWY\nHLA-C04:140,YSSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:141,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:142,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:143,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:144,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:145,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:146,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:147,YYAGYREKYRQADVNKLYLRFNFYTWAERAYLWY\nHLA-C04:148,YSAGYREKYRHADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:149,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:15,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C04:150,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:151,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:152,YSAGYREKYRQADVNKLYLRFNFYNWAERAYTWY\nHLA-C04:153,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C04:154,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:155,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:156,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:157,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:158,YSAGYREKYRQADVNKLYLRFNFYTCAERAYTWY\nHLA-C04:159,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:16,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:160,YYAGYREKYRQADVNKLYLRFNFYTLAALAYTWY\nHLA-C04:161,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:162,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:163,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:164,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:165,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:166,YSSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:167,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:168,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTCY\nHLA-C04:169,YSAGYRENYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C04:17,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C04:171,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:172,YSAGYREKYRQADVSNLYLRFNFYTWAERAYTWY\nHLA-C04:174,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:175,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:176,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:177,YSAGYREKYRQADVNKLYLRFNFYTWAERAYKWY\nHLA-C04:178,YSAGYREKYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C04:179,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:18,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:180,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:181,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:182,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:183,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:184,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:185,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:186,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:187,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:188,YSAGYREKYRQADVNKLYLRFNFYTWAERTYTWY\nHLA-C04:189,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:19,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:190,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:192,YSAGYREKYRQDDVNKLYLRFNFYTWAERAYTWY\nHLA-C04:193,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:194,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:195,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:196,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:197,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:198,CSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:199,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:20,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:200,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:201,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:202,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:204,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:206,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:207,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:208,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:209,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:21
0,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C04:211,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:212,YSAGYREKYRQADVNKLYLRFNFYTWAVLAYLWY\nHLA-C04:213,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:214,YSAGYREKYRQTDVSKLYLRFNFYTWAERAYTWY\nHLA-C04:216,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:218,YSAGYREKYRQADVNKLYLRFNLYTWAERAYTWY\nHLA-C04:219,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:220,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:221,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:222,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:223,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:224,YSAGYREKYRQADVNKLYIRYNFYTWAERAYTWY\nHLA-C04:226,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:227,YSAGYREKYRQADGNKLYLRFNFYTWAERAYTWY\nHLA-C04:228,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:229,YSAGYREKYRQADVNKLYLRFKFYTWAERAYTWY\nHLA-C04:23,YSAGYREKYRQADVNKLYLRFDFYTWAERAYTWY\nHLA-C04:230,YSAGYREKYRQADVNKLYLWYNFYTWAERAYTWY\nHLA-C04:231,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:232,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:235,YSAGYREKYRQADVNKLYLRFNFYTWSERAYTWY\nHLA-C04:237,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:238,YSAGYREKYRQADVNKLHLRFNFYTWAERAYTWY\nHLA-C04:239,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:24,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:240,YSAGYREKYRQADVNKLYLRSNFYTWAERAYTWY\nHLA-C04:241,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:242,YSAGYREKYRQADVNKLYFRYNFYTWAERAYTWY\nHLA-C04:243,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:244,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:245,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:246,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:247,YSAGYREKYRQADVNKLYLRFNFYTRAERAYTWY\nHLA-C04:248,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:249,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:25,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:250,YSAGYREKYRQADVNKQYLRFNFYTWAERAYTWY\nHLA-C04:251,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:252,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTRY\nHLA-C04:254,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:256,YYSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:257,YSAGYREKYRQADVNKLYLRFNFYTWAERACTWY\nHLA-C04:258,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:259,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:26,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:260,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:261,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:262,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:263,YSAGYREKYRQADVNKLYLWFNFYTWAERAYTWY\nHLA-C04:264,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:265,YSAGYREKYRQADVNKLYLRFNFYTWAVLAYEWY\nHLA-C04:266,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:267,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:268,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:269,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:27,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:270,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:271,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:272,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:273,YSAGYQEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:274,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:275,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:276,YSAGYREKYRQADVNKLYLSFNFYTWAERAYTWY\nHLA-C04:277,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:278,YSAGYREKYRQANVNKLYLRFNFYTWAERAYTWY\nHLA-C04:28,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:280,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:281,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:282,YSAGYREKYRQADVNKLYLRFNFYTWAERVYTWY\nHLA-C04:283,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:284,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:285,YSAGYREKYCQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:286,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:287,YSAGYRE
KYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:288,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:289,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:29,YSAGYREKYRQADVSNLYLRFNFYTWAERAYTWY\nHLA-C04:290,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:291,YSAGYREKYRQADVNKLYLRFNFYTWATLAYTWY\nHLA-C04:292,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:293,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:294,YYAGYREKYRQADVNKLYLRYNFYTWAELAYTWY\nHLA-C04:295,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:296,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:297,YSAGYREKYRQADVNKLYLRFNFYTWADRAYTWY\nHLA-C04:298,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:299,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:30,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:301,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:302,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:303,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:304,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:306,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:307,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:308,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:31,YSAGYREKYRQADVNKLYLRFNFYTWVERAYTWY\nHLA-C04:310,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:311,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:312,YSAGYREKYRQADVNKLYLRFNIYTWAERAYTWY\nHLA-C04:313,YSAGYREKYRQADVNKLYLRFNSYTWAERAYTWY\nHLA-C04:314,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:315,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:316,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:317,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:318,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:319,YSAGYREKYRQADVNKLSLRFNFYTWAERAYTWY\nHLA-C04:32,YSAGYREKYRQADVNKLYLRFNFYTWAERAYEWY\nHLA-C04:320,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:321,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:322,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:323,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:324,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:325,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:326,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:327,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:328,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:329,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:33,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:330,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:331,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:332,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:333,YSAGYREKYHQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:334,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:335,YYAGYREKYRQADVNKLYLRFDFYTWAERAYTWY\nHLA-C04:336,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:337,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:338,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:339,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:34,YSAGYREKYRQADVNKLYLRFNFYTWAVLAYLWY\nHLA-C04:340,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:341,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:342,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:343,YSAGFREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:344,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:345,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTSY\nHLA-C04:346,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:347,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C04:348,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:35,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:351,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:352,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:353,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:354,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:355,HSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:356,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:357,YYAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C04:358,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:359,YSAGYREKYRQADVNKL
YLRFNFYTWAERAYTWY\nHLA-C04:36,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C04:37,YSAGYREKYRQADVNKLYLWCNFYTWAERAYTWY\nHLA-C04:38,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:39,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:40,YSAGYREKYRQADVNKLYFRFNFYTWAERAYTWY\nHLA-C04:41,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:42,YDAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:43,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:44,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:45,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:46,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:47,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:48,YSAGYREKYRQADVNKLYLRFNFYTWAERPYTWY\nHLA-C04:49,YSAGYWEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:50,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:51,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:52,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:53,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:54,YSAGYREKYRQADVNKLYLRFDSYTWAERAYTWY\nHLA-C04:55,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C04:56,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:57,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:58,YSAGYREKYRQADVNKLYLRFNFYTLAALAYTWY\nHLA-C04:60,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:61,YSAGYREKYRQADVNKLYLRFNFYTWAARAYTWY\nHLA-C04:62,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:63,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:64,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:65,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:66,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:67,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:68,YSAGYREKYRQADVNKLYLRFNFYTWAAQAYTWY\nHLA-C04:69,YSAGYGEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:70,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:71,YFAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:72,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:73,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:74,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:75,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:76,YSAGNREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:77,YSAGYREKYRQADVNKLYLRFNFYTWAERAYEWY\nHLA-C04:78,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:79,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:80,YYAGYREKYRQADVNKLYIRYNFYTWAERAYTWY\nHLA-C04:81,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:82,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:83,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:84,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:85,YSAGYREKYRQADVNKLYLRFNFYTWAERAYMWY\nHLA-C04:86,YSAGYREKYSQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:87,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:89,YSAGYREKYRQADVNKLYLRFNYYTWAERAYTWY\nHLA-C04:90,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:91,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:92,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:94,YSAGYREKYRQADVNKLYLRFNFYTWAEWAYTWY\nHLA-C04:96,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:97,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:98,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C04:99,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C0501,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C0502,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYMWY\nHLA-C0503,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C0504,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYTWY\nHLA-C0505,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C0506,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C0508,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C0509,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C0510,YYAGYREKYRQTDVNKLYIRYNFYTWAERAYTWY\nHLA-C0511,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYTWY\nHLA-C0512,YYAGYREKYRQTDVNKLYLRYNFYTWAVRAYTWY\nHLA-C0513,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:01,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:03,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:04,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYTWY\nHLA-C05:05,YYAGYREKYRQT
DVNKLYLRYNFYTWAERAYTWY\nHLA-C05:06,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:08,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C05:09,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:10,YYAGYREKYRQTDVNKLYIRYNFYTWAERAYTWY\nHLA-C05:100,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:101,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:102,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTSY\nHLA-C05:103,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYTWY\nHLA-C05:104,YYAGYREKYRQTDVNKLYFRYNFYTWAERAYTWY\nHLA-C05:105,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYEWY\nHLA-C05:106,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C05:107,YYAGYREKYRQTDVNKLYLWCDYYTWAERAYTWY\nHLA-C05:108,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:109,YYAGYREKYRQTDMNKLYLRYNFYTWAERAYTWY\nHLA-C05:11,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYTWY\nHLA-C05:110,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:111,YYAGYREKYRQTDVNKLYLRSNFYTWAERAYTWY\nHLA-C05:112,YYAGYREKYRQTDVNKLYLRYNFYTWAEQAYTWY\nHLA-C05:114,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:115,YYAGYREKYRQTDVNKLYLRYNFYTWAALAYTWY\nHLA-C05:116,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:117,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:118,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:119,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:12,YYAGYREKYRQTDVNKLYLRYNFYTWAVRAYTWY\nHLA-C05:120,YYAGYREKYRRTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:121,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:122,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:123,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:124,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:125,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:126,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:127,YYGGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:129,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:13,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:130,YYAGYREKYRQTDVNKLYLRYNFYTWAERAFTWY\nHLA-C05:131,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:132,YYAGYREKYRQTDVNKLYLRYDLYTWAERAYTWY\nHLA-C05:133,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:134,YYAGYREKYRQTDVNKLYLRYNFYTLAALAYTWY\nHLA-C05:135,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYTWY\nHLA-C05:136,YYAGYREKYRQTDVNKLYLRYNFYSWAERAYTWY\nHLA-C05:137,YYAGYREKYRQSDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:138,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:139,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:14,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:140,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:141,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:142,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:143,YYAGYREKYRQTDVNKLYLRYNFYTLAARAYTWY\nHLA-C05:144,YYAGYRGKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:145,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:146,YYAGYREKYRQTDVNKLYLRYNFYTWAEDAYTWY\nHLA-C05:147,YYAGYREKYRQTDVNKLYLRYDYYTWAERAYTWY\nHLA-C05:148,YYAGYREKYRQTDVNKLYIRYDYYTWAERAYTWY\nHLA-C05:149,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:15,YYAGYWEKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:150,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:151,YYAGYREKYRQTDVNKLYLRYNFYTLAVLAYLWY\nHLA-C05:152,YYAGYREKYRQTDVNKLYWRYNFYTWAERAYTWY\nHLA-C05:155,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:156,YSAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:157,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:158,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:159,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:16,YYAGYREKYRQTDVNKLYLWYNFYTWAERAYTWY\nHLA-C05:160,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:161,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:162,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:163,YYAGYREKYRQTDVNKLYLRYNFYTWAADAYTWY\nHLA-C05:164,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:165,YYAGYRKKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:166,YYAGYREKYRQTDVNKLYLRYNLYTWAERAYTWY\nHLA-C05:167,YYAGYREKYRQTDVNKLYLRYNFYT
WAERAYTWY\nHLA-C05:168,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:17,YYAGYREKYRQTDVNKLYLRYNFYTWAALAYTWY\nHLA-C05:170,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:171,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:172,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:173,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:174,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:176,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:177,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:178,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:179,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:18,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:181,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:182,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:183,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:184,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYLSY\nHLA-C05:185,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:186,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:187,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:188,YYAGYREKYRQTDVNKLYLRYNSYTWAERAYTWY\nHLA-C05:189,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:19,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:190,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:191,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:192,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:193,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:194,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:195,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:196,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:197,YYSGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:198,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:199,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:20,YYAGYREKYRQTDVNNLYLRYNFYTWAERAYTWY\nHLA-C05:200,YFAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:201,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:203,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:21,YYAGYREKYRQTDVNKLHLRYNFYTWAERAYTWY\nHLA-C05:22,YYAGYREKYRQTDVNKLYLRYDFYTWAERAYTWY\nHLA-C05:23,YYAGYREKYRQTDVNKLYLRYNFYTLAERAYTWY\nHLA-C05:24,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:25,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:26,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:27,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYLWY\nHLA-C05:28,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:29,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:30,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:31,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:32,YYAGYREKYRQTDVNRLYLRYNFYTWAERAYTWY\nHLA-C05:33,YYAGCREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:34,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:35,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:36,YYAGYRENYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:37,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:38,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:39,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYLWY\nHLA-C05:40,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:41,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:42,YYAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C05:43,YDAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:44,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:45,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:46,YYAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C05:47,YYAGYREKYRQTDVNKLYLRYNYYTWAERAYTWY\nHLA-C05:49,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:50,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:52,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C05:53,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:54,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:55,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:56,YYAGYQEKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:57,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:58,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:59,YYAGYREKYRQTDVNKLYLRCNFYTWAERAYTWY\nHLA-C05:60,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:61,YYAGYREKYRQTDVNKLYLRYNF
YTWAERAYTWY\nHLA-C05:62,YYAGYREKYRQTDVNKLYLRYNFYTLAADAYTWY\nHLA-C05:63,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:64,YYAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C05:65,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:66,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:67,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:68,YYAGYREKYRQTDVNKLYLRYNFYTWAALAYTWY\nHLA-C05:69,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:70,YYAGYREKYRQTDVNKLYLRYNFYTWAELTYTWY\nHLA-C05:71,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:72,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:73,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:74,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:75,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:76,YYAGYREKYRQTDVNKLYLRYNFYTWAARAYTWY\nHLA-C05:77,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:78,YSAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:79,YYAGYREKYRQTDVNKLYLRYNFYTWATLAYTWY\nHLA-C05:80,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:81,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:82,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:83,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:84,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:85,YYAGYREKYRQTDVNKLYLWCNFYTWAERAYTWY\nHLA-C05:86,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:87,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:88,YYAGYREKYRQTDVNKLYLRYNFYTWAQRAYTWY\nHLA-C05:89,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C05:90,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:93,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:94,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:95,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:96,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:97,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C05:98,YYSGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C0602,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C0603,YYSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C0604,YDSGYREKYRQADVNKLYLWYDSYTWAELAYTWY\nHLA-C0605,YDSGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C0606,YDSGYREKYRQADVNKLYLWYDSYTWAERAYTWY\nHLA-C0607,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C0608,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYEWY\nHLA-C0609,YDSGYREKYRQADVNKLYLWYNFYTWAEWAYTWY\nHLA-C0610,YDPGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C0611,YDSGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C0612,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C0613,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:02,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:03,YYSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:04,YDSGYREKYRQADVNKLYLWYDSYTWAELAYTWY\nHLA-C06:05,YDSGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C06:06,YDSGYREKYRQADVNKLYLWYDSYTWAERAYTWY\nHLA-C06:07,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:08,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYEWY\nHLA-C06:09,YDSGYREKYRQADVNKLYLWYNFYTWAEWAYTWY\nHLA-C06:10,YDPGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:100,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:101,YDSGYREKYRQADVNKLYLRFDSYTWAEWAYTWY\nHLA-C06:102,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:103,YDSGYREKYRQADVNKLYLWYEYYTWAEWAYTWY\nHLA-C06:104,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:105,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:106,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:107,YDSGYREKYRQADVNKLYLWYDYYTWAEWAYTWY\nHLA-C06:108,YDSGYREKYRQADVNKLYLWYDSYTWPEWAYTWY\nHLA-C06:109,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:11,YDSGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C06:110,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:111,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:112,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:113,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:114,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:115,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:117,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:118,YDSGYREKYRQADVNKLYLWYDSYTLAALAYTWY\nHLA-C06:119
,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:12,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:120,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:121,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:122,YDSGYREKYRQADVSKLYLWYDSYTWAEWAYTWY\nHLA-C06:123,YDSGYREKYRQADVNKLYFWYDSYTWAEWAYTWY\nHLA-C06:124,YDSGYREKYRQTDVSKLYLWYDSYTWAEWAYTWY\nHLA-C06:125,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:126,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:127,YDSGYREKYRQADVNKLYLRYDSYTWAEWAYTWY\nHLA-C06:129,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:13,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:130,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:131,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:132,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:133,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:135,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:136,YDSGYREKYRQADVNKLYLRSDSYTWAEWAYTWY\nHLA-C06:137,YDSGYREKYRQADVNKLYLWYDSYTWVEWAYTWY\nHLA-C06:138,YDSGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:139,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:14,YDSGYREKYRQADVNKLYIWYDSYTWAEWAYTWY\nHLA-C06:140,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:141,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:142,YDSGYREKYRQADVNKLYLWFDSYTWAEWAYTWY\nHLA-C06:143,YDSGYREKYRQADVNKLYIRYDSYTWAEWAYTWY\nHLA-C06:144,YDSGYREKYRQADVNKLYLRYNFYTWAEWAYTWY\nHLA-C06:145,YDSGYREKYRQADVNKLYLWHDSYTWAEWAYTWY\nHLA-C06:146,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:147,YDSGYRNIFTNTYESNLYLWYDSYTWAEWAYTWY\nHLA-C06:148,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:149,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:15,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:150,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:151,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:153,YDSGYREKYRQADVNKLYLWYDSYTWAVLAYTWY\nHLA-C06:154,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:155,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:156,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:157,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:158,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:159,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:160,YDSGYREKYRQADVNQLYLWYDSYTWAEWAYTWY\nHLA-C06:161,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYAWY\nHLA-C06:162,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:163,YDSGYREKYRQADVNKLCLWYDSYTWAEWAYTWY\nHLA-C06:164,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:165,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTSY\nHLA-C06:166,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:167,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:168,YDSGYREKYRQADVNKLYLWYDLYTWAEWAYTWY\nHLA-C06:169,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:17,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:170,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:172,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:173,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:174,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:176,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:177,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:178,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:179,YDSGYREKYRQADVNKLYLWCDYYTWAEWAYTWY\nHLA-C06:18,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:180,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:181,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:182,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:183,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:184,YDSGYREKYRQADVNRLYLWYDSYTWAEWAYTWY\nHLA-C06:185,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:186,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:187,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:188,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:189,YDSGYREKYRQADVNKLYLWYDSCTWAEWAYTWY\nHLA-C06:19,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:190,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:191,YDSGYREKY
RQADVNKLYLWYDSHTWAEWAYTWY\nHLA-C06:192,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:193,YDSGYREKYREADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:194,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:195,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:196,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:197,YDSGYREKYRQADVNKLYLWYDSYTWAELAYLWY\nHLA-C06:198,YDSGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:199,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:20,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:201,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:202,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:203,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:204,YDSGYREKYRQADVNKLYLWCDYYTWAERAYTWY\nHLA-C06:205,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:206,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:207,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWH\nHLA-C06:209,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:21,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:210,YDSGYREKYRQADVNNLYLWYDSYTWAEWAYTWY\nHLA-C06:212,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:213,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:214,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:216,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:217,YDSGYREKYRQTDVSNLYLWYDSYTWAEWAYTWY\nHLA-C06:218,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:219,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:22,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:221,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:222,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:223,YDSGYREKYRQADVNKLHLWYDSYTWAEWAYTWY\nHLA-C06:224,YDSGYREKYRQADVNKLYLWYDLYTWAEWAYTWY\nHLA-C06:225,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:226,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:227,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:228,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:229,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:23,YDSGYREKYRQADVNKLYLWCDSYTWAEWAYTWY\nHLA-C06:230,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:231,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:232,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:233,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:234,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:235,YDSGYREKYRQVDVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:236,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:237,YDSGYREKYRQADVNKLYLWYDSYTWAEWASTWY\nHLA-C06:238,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:239,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:24,YDSGYREKYRQADVNKLYLWYDSYTWAEWAHTWY\nHLA-C06:240,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:241,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:242,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:243,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:244,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:245,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:246,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:247,YDSGYREKYRQAHVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:248,YDSGYREKYRQTDVSNLYLWYDSYTWAEWAYTWY\nHLA-C06:249,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:25,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:250,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:251,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:26,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:27,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:28,YDAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:29,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:30,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:31,YDSGYREKYRQADVNKLYLWYDSYTWAAWAYTWY\nHLA-C06:32,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:33,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:34,YDSGYREKYRQADVNKLYLWYDFYTWAEWAYTWY\nHLA-C06:35,YDSGYREKYRQADVNKLYIRSDSYTWAEWAYTWY\nHLA-C06:36,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:37,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:38,YDSGYREKYRQADVNKLYLWYDSYTWAEWAY
TWY\nHLA-C06:39,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:40,YDSGYREKYRQADVNKLYLWYDSYTWAEWTYTWY\nHLA-C06:41,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:42,YDSGYREKYRQADVNKLYLWYDSYTRAEWAYTWY\nHLA-C06:43,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:44,YDSGYRENYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:45,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:47,YDSGYREKYRQADVNKLYLWSDSYTWAEWAYTWY\nHLA-C06:48,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:50,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:51,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:52,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:53,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:54,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:55,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:56,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:57,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:58,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:59,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:60,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:61,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:62,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:63,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:64,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:65,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:66,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:67,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:68,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:69,YDSGYREKYRQADENKLYLWYDSYTWAEWAYTWY\nHLA-C06:70,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:71,YDSGYPEKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:72,YDSGYREKYRQADVNKLYIVYDSYTWAEWAYTWY\nHLA-C06:73,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:75,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:76,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:77,YHSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:78,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:80,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:81,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:82,YDSGYREKYRQADVNNLYLWYDSYTWAEWAYTWY\nHLA-C06:83,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:84,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:85,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:86,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:87,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:88,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:89,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:90,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:91,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:92,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:93,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:94,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:95,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:96,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYLWY\nHLA-C06:97,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:98,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C06:99,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C0701,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0702,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C0703,YDSGYREKYRQADVSNLYLRSDSYTWAALAYLWY\nHLA-C0704,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C0705,YDSGYREKYRQADVSNLYLNYDSYTLAALAYTWY\nHLA-C0706,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0707,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C0708,YDSGYREKYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C0709,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C0710,YDSGYREKYRQADVSNLYIRSDSYTLAALAYTWY\nHLA-C0711,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C0712,YDSGYREKYRQADVSNLYFRYDFYTWAADAYTWY\nHLA-C0713,YDSGYREKYRQADVSNLYLRSDFYTLAALAYTWY\nHLA-C0714,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0715,YDSGYREKYRQADVSNLYLRSDSYTLAALAYEWY\nHLA-C0716,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0717,YDSGYREKYRQADVSNLYLRSDSYTWAALAYTWY\nHLA-C0718,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0719,YDSGYRENYRQADVSNLYLRSDSYTLAALAYTW
Y\nHLA-C0720,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0721,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0722,YDSGYRENYRQADVSNLYLRYDSYTLAAWAYTWY\nHLA-C0723,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C0724,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0725,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C0726,YYSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0727,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0728,YDSGYRENYRQADVSNLYLRYNFYTLAALAYTWY\nHLA-C0729,YDSGYREKYRQADVSNLYLRSDYYTLAALAYTWY\nHLA-C0730,YDSGYRENYRQADVSNLYLRYDSYTLAGLAYTWY\nHLA-C0731,YDSGYREKYRQADVSNLYLWYDSYTLAALAYTWY\nHLA-C0732,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTAP\nHLA-C0734,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0735,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0736,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C0737,YDSGYREKYRQADVSNLYLRSDSYTLAARAYTWY\nHLA-C0738,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:01,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:02,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:03,YDSGYREKYRQADVSNLYLRSDSYTWAALAYLWY\nHLA-C07:04,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:05,YDSGYREKYRQADVSNLYLNYDSYTLAALAYTWY\nHLA-C07:06,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:07,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C07:08,YDSGYREKYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C07:09,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C07:10,YDSGYREKYRQADVSNLYIRSDSYTLAALAYTWY\nHLA-C07:100,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:101,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:102,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:103,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:105,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:106,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:107,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:108,YDSGYRENYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C07:109,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:11,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:110,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:111,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:112,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:113,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:114,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:115,YDSGYRENYRQADVSDLYLRYDSYTLAALAYTWY\nHLA-C07:116,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:117,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:118,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:119,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:12,YDSGYREKYRQADVSNLYFRYDFYTWAADAYTWY\nHLA-C07:120,YDSGYRENYRQADVSNLYLRYDSYTLAALAYPWY\nHLA-C07:122,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:123,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:124,YDSGYRENYRQADESNLYLRYDSYTLAALAYTWY\nHLA-C07:125,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:126,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:127,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:128,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:129,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:13,YDSGYREKYRQADVSNLYLRSDFYTLAALAYTWY\nHLA-C07:130,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:131,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:132,DDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:133,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:134,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:135,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:136,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:137,YDSGYREKYRQADVSNLYLRSDSYTLAALTYTWY\nHLA-C07:138,YDSGYREKYRQADVSNLYLRSDSYTLAAWAYTWY\nHLA-C07:139,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:14,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:140,YDSGYRENYRQADVSNLYLRYDSYTWAVDAYTWY\nHLA-C07:141,YDSGYRENYRQADVSNLYLRYDSYTWAALAYTWY\nHLA-C07:142,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:143,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:144,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHL
A-C07:145,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:146,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:147,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:148,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:149,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:15,YDSGYREKYRQADVSNLYLRSDSYTLAALAYEWY\nHLA-C07:151,YDSGYRENYRQADVSNLYLRYDSYTLAADAYTWY\nHLA-C07:153,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:154,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:155,YDSGYREKYRQADESNLYLRSDSYTLAALAYTWY\nHLA-C07:156,YDSGYRENYRQAHVSNLYLRYDSYTLAALAYTWY\nHLA-C07:157,YDSGYREKYRQADVSNLYLKSDSYTLAALAYTWY\nHLA-C07:158,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:159,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:16,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:160,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:161,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:162,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:163,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:165,YDSGYRENYRQADVSNLYLRYDSYTLAELAYTWY\nHLA-C07:166,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:167,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:168,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:169,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:17,YDSGYREKYRQADVSNLYLRSDSYTWAALAYTWY\nHLA-C07:170,YDSGYRENYRQADVSNLYLRYDSYTYAALAYTWY\nHLA-C07:171,YDSGYREKYRQADVSNLYLRSDSYTYAALAYTWY\nHLA-C07:172,YDAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:173,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:174,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:175,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:176,YDSGYRENYRQADVSNLYLRYDSYTLAARAYTWY\nHLA-C07:177,YDSGYREKYRQTDVSNLYLWCDYYTLAALAYTWY\nHLA-C07:178,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:179,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:18,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:180,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:181,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:182,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:183,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:184,YDSGYREKYRQADVSNLYIRYDYYTLAALAYTWY\nHLA-C07:185,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:186,YDSGYREKYRQADVSNLYLRSDSYTLAALAYLWY\nHLA-C07:187,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:188,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:189,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:19,YDSGYRENYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:190,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:192,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:193,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:194,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:195,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:196,YDSGYRENYRQADVSNLYIRYDSYTLAALAYTWY\nHLA-C07:197,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:199,YDSGYREKYRQADVSNLYLRYDFYTLAADAYTWY\nHLA-C07:20,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:200,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:201,YDSGYRENYRQADVSNLCLRYDSYTLAALAYTWY\nHLA-C07:202,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:203,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:204,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:205,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:206,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:207,YDSGYRENYRQADVSNLYLRYDSYTLAALAYEWY\nHLA-C07:208,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:209,YDSAYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:21,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:210,YDSGYRENYRQTDVNNLYLRYDSYTLAALAYTWY\nHLA-C07:211,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:212,YDSGYRENYRQADVSNLYLRYDSYTLAALAYLWY\nHLA-C07:213,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:214,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:215,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:216,YDSGYREKYRQADVSNLYLRSDSYTLAAQAYTWY\nHLA-C07:217,
YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:218,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:219,YDSGYRENYRQADVSNLYLRYESYTLAALAYTWY\nHLA-C07:22,YDSGYRENYRQADVSNLYLRYDSYTLAAWAYTWY\nHLA-C07:220,YDSGYREKYRQADVSNLYLRSDSYTLAALAYKWY\nHLA-C07:221,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:222,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:223,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:224,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:225,YDSGYRENYCQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:226,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:228,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:229,YDSGYQEKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:23,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:230,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:231,YDSGYRENYRQADVSNLYLRYDDYTLAALAYTWY\nHLA-C07:232,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:233,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:234,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:236,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:237,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:238,YDSGYREKYRQTDVNNLYLRSDSYTLAALAYTWY\nHLA-C07:239,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:24,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:240,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:241,YDSGYREKYRQADVSNLYLRSDSYTLAELAYTWY\nHLA-C07:242,YDSGYREKYRQADVSNLYLRSDSYTWAELAYLWY\nHLA-C07:243,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:244,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:245,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:246,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:247,YDSGYRENYRQADVNNLYLRYDSYTLAALAYTWY\nHLA-C07:248,YDSGYRENYRQADVSNLYLRYDYYTLAALAYTWY\nHLA-C07:249,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:25,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:250,YDSGYRENYRQADVSNLYLRCDSYTLAALAYTWY\nHLA-C07:251,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:252,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:253,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:254,YDSGYRENYRQADVSNLHLRYDSYTLAALAYTWY\nHLA-C07:255,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:256,YDSGYRENYRQADVSNLYLRYDSCTLAALAYTWY\nHLA-C07:257,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:258,YDSGYREKYRQADVSNLYLRADSYTLAALAYTWY\nHLA-C07:259,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:26,YYSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:260,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:261,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:262,YDSGYWEKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:263,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:265,YDSGYREKYRQADVSNLYLRSDSYTLAELAYLWY\nHLA-C07:266,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:267,CDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:268,YDSGYRENYRQADVSNLNLRYDSYTLAALAYTWY\nHLA-C07:269,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:27,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:270,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:271,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:272,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:273,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:274,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:275,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:276,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:277,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:278,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:279,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:28,YDSGYRENYRQADVSNLYLRYNFYTLAALAYTWY\nHLA-C07:280,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:281,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:282,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:283,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:284,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:285,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:286,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:287,YDSGYREKYR
QADVSNLYLRSDSYILAALAYTWY\nHLA-C07:288,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:289,YDSGYREKYRQADVSNLYLRSDLYTLAALAYTWY\nHLA-C07:29,YDSGYREKYRQADVSNLYLRSDYYTLAALAYTWY\nHLA-C07:290,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:291,YDSGYREKYCQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:292,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:293,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:294,YDSGYREKYRQADVSNLYLSYDSYTLAALAYTWY\nHLA-C07:296,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:297,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:298,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:299,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:30,YDSGYRENYRQADVSNLYLRYDSYTLAGLAYTWY\nHLA-C07:300,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:301,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:302,YDSGYREKYRQADVSNLYFRSDSYTLAADAYTWY\nHLA-C07:303,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:304,YDSGYRENYRQADVSNLYLRYDSYTWAELAYTWY\nHLA-C07:305,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:306,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:307,YDSGYREKYRQADVSNLYLRSDSYTWAERAYTWY\nHLA-C07:308,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:309,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:31,YDSGYREKYRQADVSNLYLWYDSYTLAALAYTWY\nHLA-C07:310,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:311,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:312,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:313,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:314,YYSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:315,YDSGYREKYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C07:316,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:317,YYSGYRENYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:318,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:319,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:320,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:321,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:322,CDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:323,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:324,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:325,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:326,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:327,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:328,YDSGYREKYRQTDVNKLYFRYDFYTLAADAYTWY\nHLA-C07:330,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:331,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:332,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:333,YDSGYREKYRQADVSNLYLRSDSYTLAALAYRWY\nHLA-C07:334,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:335,YDSGYREKYAQTDVSNLYLRSDSYTLAALAYTWY\nHLA-C07:336,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:337,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:338,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:339,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:340,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:341,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:342,YDSGYREKYRQADVSNLYLRSDSYTLAVLAYTWY\nHLA-C07:343,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:344,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:345,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:346,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:348,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:349,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:35,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:351,YYSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:352,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:353,YDSGYRENYRQADVSNLYLKYDSYTLAALAYTWY\nHLA-C07:354,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:355,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:356,YDSEYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:357,YDSGYREKYRQADVTNLYFRYDFYTLAADAYTWY\nHLA-C07:358,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:359,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:36,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:360,YDSGYREKYRQADVSNLY
LRSDSYTLAALAYTWY\nHLA-C07:361,YDSGYREKYRQADVSHLYFRYDFYTLAADAYTWY\nHLA-C07:362,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:363,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:364,YDSGYREKYRQADVSNLYLWCDFYTLAADAYTWY\nHLA-C07:365,YDSGYREKYRQADVSNLYFRYDFYTLAADAYRWY\nHLA-C07:366,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:367,YDSGYREKYRQADVSNLYIRSDSYTLAALAYTWY\nHLA-C07:368,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:369,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:37,YDSGYREKYRQADVSNLYLRSDSYTLAARAYTWY\nHLA-C07:370,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:371,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:372,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:373,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:374,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:375,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:376,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:377,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:378,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:379,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:38,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:380,YDSGYRENHRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:381,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:382,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:383,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:384,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:385,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:386,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:387,YDSGYQENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:388,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:389,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:39,YDSGYREKYRQTDVSNLYLRSDSYTLAALAYTWY\nHLA-C07:390,YDAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:391,YDSGYREKHRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:392,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:394,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:395,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:396,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:397,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:398,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:399,YDSGYREKYRQADVSNLYLRSDSYPLAALAYTWY\nHLA-C07:40,YDSGYRENYRQTDVSNLYLRYDSYTLAALAYTWY\nHLA-C07:400,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:401,YDSVYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:402,YSSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:403,YDSGYREKYRQADVNNLYFRYDFYTLAADAYTWY\nHLA-C07:404,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:405,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:406,YDSGYREKYRQADVNKLYFRYDFYTLAADAYTWY\nHLA-C07:407,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:408,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:409,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:41,YDSGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C07:410,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:411,YDSGYRENYHQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:412,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:413,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:414,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:415,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:416,YDSGYREKYRQADLSNLYLRSDSYTLAALAYTWY\nHLA-C07:417,YDSGYLENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:418,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:419,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:42,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:420,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:421,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:422,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:423,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:424,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:425,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:426,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:427,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:428,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:429,YDSGYREKYRQADVSNLYLRSDSYTLA
ALAYTWY\nHLA-C07:43,YDSGYREKYRQADVSNLYIRYDSYTLAALAYTWY\nHLA-C07:430,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:431,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:432,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:433,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:434,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:435,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:436,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:438,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:439,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:44,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:440,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:441,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:442,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:443,YDSGYRENYRQADVSNLYLRYDSYTLAALASTWY\nHLA-C07:444,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:445,YDSGYRENYSQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:446,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:447,YDSGYREKYRQADVSNLYFRYDFYTWAEWAYTWY\nHLA-C07:448,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:449,YNSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:45,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:450,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:453,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:454,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:455,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:456,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:457,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:458,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:459,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:46,YDSEYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:460,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:461,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:462,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:463,YDSGYRENYRQADVSNLYWRYDSYTLAALAYTWY\nHLA-C07:464,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:465,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:466,YDSGYREKYRQADVSNLYFRYDFYTLAADAYKWY\nHLA-C07:467,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:468,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:469,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWF\nHLA-C07:47,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:470,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:471,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:472,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:473,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:474,YDSGYREKYRQADVSNLYLRSDSYTLTALAYTWY\nHLA-C07:475,YDSGYWENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:477,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:478,YDSGYREKYRQADVSNQYLRSDSYTLAALAYTWY\nHLA-C07:479,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:48,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:480,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:481,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:482,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:485,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:486,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:487,YDSGYREKYRQADVSNLYFRYDFYTLAVDAYTWY\nHLA-C07:488,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:489,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:49,YDSGYREKYRQADVNNLYLRSDSYTLAALAYTWY\nHLA-C07:490,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:492,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:493,YDSGYRENYRQGDVSNLYLRYDSYTLAALAYTWY\nHLA-C07:495,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:496,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:497,YDSGYREKYRQADVSNLYLRSDSCTLAALAYTWY\nHLA-C07:498,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:499,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:50,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:500,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:501,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:502,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:503,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHL
A-C07:504,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:505,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:506,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:508,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:509,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:51,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:510,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:511,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:512,YDSGYRENYRQADVSNLYLRYDSYTWAERAYTWY\nHLA-C07:514,YDSGYRENYRQADVSNLYLWYDSYTLAALAYTWY\nHLA-C07:515,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:516,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWH\nHLA-C07:517,FDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:518,YDSGYREKDRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:519,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:52,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:520,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:521,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWH\nHLA-C07:522,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:523,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:524,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:525,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:526,YDSGYRENYRQADVSNLYLSYDSYTLAALAYTWY\nHLA-C07:527,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:528,YDSGYRENYRRADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:529,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:53,YDSGYRENYRQADVSNLYLRYDSYTLAAQAYTWY\nHLA-C07:530,YDSGYREKYRQADVSNLYLSSDSYTLAALAYTWY\nHLA-C07:531,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:532,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:533,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:534,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:535,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:536,YDSGYRENYRQADVSNLYLRYNLYTLAALAYTWY\nHLA-C07:537,YDSGYRENYRQADVSNLYLRYDSYTLAALAYRWY\nHLA-C07:538,YDSGYREKYRQADVSNLYLNYDSYTLAALAYTWY\nHLA-C07:539,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:54,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:540,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:541,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:542,YDSGYREKYRQADVSNLYLRSDSYTLSALAYTWY\nHLA-C07:543,YDSGYREKYRQADVSNLYLRSDSYTLAALAYMWY\nHLA-C07:544,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:545,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:546,YDSEYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:547,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:548,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:549,YDSGYRENYRQADVSNLYLRYVSYTLAALAYTWY\nHLA-C07:550,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:552,YDSGYREKYRQADVSNLYFRYDFYTLAALAYTWY\nHLA-C07:553,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:554,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:555,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:556,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:557,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:558,YDSGYREKYRQADVSNLYLRCDSYTLAALAYTWY\nHLA-C07:559,YDSGYRENYRQADVSKLYLRYDSYTLAALAYTWY\nHLA-C07:56,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:560,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:561,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:562,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:563,YDSGYREKYRQTDVSNLYFRYDFYTLAADAYTWY\nHLA-C07:564,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:565,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:566,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:567,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:568,YDSGYRENYRQADVSNLYIRFDSYTLAALAYTWY\nHLA-C07:569,YDSGYREKYRQADVSNLYFRYDFYTLVADAYTWY\nHLA-C07:57,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:570,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:571,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:572,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:573,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:574,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:575
,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:576,YDSGYREKYRQADVSNVYLRSDSYTLAALAYTWY\nHLA-C07:577,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:578,YYAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:579,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:58,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:580,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:581,YDSGYPENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:583,YYAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:584,YDSGNREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:585,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:586,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:587,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:588,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:589,YDSGYRENYRQADVSNLYFRYDSYTLAALAYTWY\nHLA-C07:59,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:590,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:591,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:592,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:594,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:595,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:596,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:597,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:598,YDSGYREKYRQADVNKLYLRSDSYTLAALAYTWY\nHLA-C07:599,YDSGYREKYRQADVSNLYLRSDSYTWAVLAYTWY\nHLA-C07:60,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:601,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:602,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:604,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:605,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:606,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:607,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:608,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:609,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:610,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:611,YDSGYREKYRQADVSNLYFRSDSYTLAALAYTWY\nHLA-C07:612,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:613,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:614,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:615,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:616,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:617,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:618,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:619,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:62,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:620,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:621,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:622,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:623,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:624,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:625,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:626,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:627,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:628,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:629,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:63,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:630,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWC\nHLA-C07:631,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:634,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:635,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:636,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:637,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:638,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:639,YDSGYRENYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:64,YSAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:640,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:641,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:642,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:643,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:644,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:645,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:646,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:647,YDSGYRENYRQADVSNLYLRYDSYNLAALAYTWY\nHLA-C07:648,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:649,YDSGYREN
YRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:65,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:650,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:651,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:652,YDSGYRENYRQADVSNLYLRYDSYTLAAPAYTWY\nHLA-C07:653,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:654,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:655,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:656,YDSGYREKYRQADVSKLYFRYDFYTLAADAYTWY\nHLA-C07:657,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:658,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:659,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:66,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:660,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:661,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:662,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:664,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:665,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:666,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:667,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:668,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:669,YDSGYRENYRQADMSNLYLRYDSYTLAALAYTWY\nHLA-C07:67,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:670,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:671,YDSGYRENYRQADVSNLYLRYDSYTWAELAYLWY\nHLA-C07:673,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:674,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:676,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:677,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:678,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:679,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:68,YDSGYREKYRQADVSNLYLRSDSYTLAADAYTWY\nHLA-C07:680,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:681,YDSGYREKYRQADVSNLYLSYDSYTLAALAYTWY\nHLA-C07:682,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:683,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:684,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:685,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:687,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:688,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:689,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:69,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:691,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:692,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:693,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:694,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:695,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:696,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:698,YDSVYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C07:699,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:70,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:700,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:701,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:703,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:704,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:705,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:706,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:707,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:708,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:709,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:71,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:710,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:711,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:712,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:713,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:714,YHSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:715,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:716,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:717,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:718,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:719,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:72,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:720,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:721,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:722,YDSGYREKYRQADVSNLYL
RSDSYTLAALAYTWY\nHLA-C07:723,YYAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:724,YHSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:73,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:74,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:75,YDSGYREKYRQADVSNLHLRSDSYTLAALAYTWY\nHLA-C07:76,YDSGYREKYRQADVNKLYLRSDSYTLAALAYTWY\nHLA-C07:77,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:78,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:79,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:80,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:81,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:82,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:83,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:84,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:85,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:86,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:87,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:88,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:89,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:90,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:91,YDSGYRENYRQADVSNLYLRYDSYTLTALAYTWY\nHLA-C07:92,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:93,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:94,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:95,YDSGYRENYRQADVSNLYLRYDSYTLAVLAYTWY\nHLA-C07:96,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C07:97,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C07:99,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C0801,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C0802,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C0803,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C0804,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C0805,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C0806,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYAWY\nHLA-C0807,YYAGYREKYRQTDVSNLYLRYNFYTLAERAYTWY\nHLA-C0808,YYAGYREKYRQTDVSNLYLSYNFYTWATLAYTWY\nHLA-C0809,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C0810,YYAGYREKYRQTDVNKLYLRYNFYTWATLAYTWY\nHLA-C0811,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C0812,YYAGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C0813,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C0814,YYSGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:01,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:02,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:03,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:04,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C08:05,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C08:06,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYAWY\nHLA-C08:07,YYAGYREKYRQTDVSNLYLRYNFYTLAERAYTWY\nHLA-C08:08,YYAGYREKYRQTDVSNLYLSYNFYTWATLAYTWY\nHLA-C08:09,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C08:10,YYAGYREKYRQTDVNKLYLRYNFYTWATLAYTWY\nHLA-C08:100,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYKWY\nHLA-C08:101,YYAGYREKYRQTDVSNLYLRFNFYTWATLAYTWY\nHLA-C08:102,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:103,YYSGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:104,YYAGYREKYRQTDVSNLYLRYNFYTLAELAYTWY\nHLA-C08:105,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:106,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:107,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:108,YYAGYREKYRQTDVSNLYLRYDFYTWAERAYTWY\nHLA-C08:109,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:11,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C08:110,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:111,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:112,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:113,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYTWY\nHLA-C08:114,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWH\nHLA-C08:115,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYLWY\nHLA-C08:116,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:117,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:118,YYAGYREKYRQTDVSNLYLRYNFYTWATRAYTWY\nHLA-C08:119,CYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:12,YYAGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C08:120,YYAGYREKYRQTDVSNLHLRYNFYTWAERAYTWY\nHLA-
C08:122,YYAGYPEKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:123,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:124,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:125,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:126,YYAGYREKYRQTDVSNLYIWYNFYTWAERAYTWY\nHLA-C08:128,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:13,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C08:131,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:132,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:133,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:134,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:135,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:136,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:137,YYAGYREKYRQADVSNLYLRYNFYTWATLAYTWY\nHLA-C08:138,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYLWY\nHLA-C08:139,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C08:14,YYSGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:140,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:142,YYAGYREKYRQTDVSNLYLRYNFYTWAEDAYTWY\nHLA-C08:143,YYAGYREKYRQTDVSNLYLRFNFYTWATLAYTWY\nHLA-C08:144,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:145,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:146,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:147,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:148,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:149,YYAGYREKYRQTDVSNLYLRYNFYTWAARAYTWY\nHLA-C08:15,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:150,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:151,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:152,YYAGYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-C08:153,YYAGYWEKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:154,YYAGYREKYRQTDVSNLYLRCNFYTWATLAYTWY\nHLA-C08:155,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:156,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:157,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:158,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:159,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:16,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:160,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C08:162,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:163,YSAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:164,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:165,YYAGYREKYRQTDVSNLYLRYNFYTWAVLAYTWY\nHLA-C08:166,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:167,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:168,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C08:169,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:17,YYAGYREKYCQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:170,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:171,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:172,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:174,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:175,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:176,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:177,YYAGYREKYRQTDVSNLYLRYNFYTWAMLAYTWY\nHLA-C08:178,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:18,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:19,YYAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C08:20,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:21,YYAGYREKYRQADVSNLYLRYNFYTWATLAYTWY\nHLA-C08:22,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:23,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:24,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:25,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C08:27,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C08:28,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:29,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C08:30,YYAGYREKYRQTDVSNLYLRYNFYTWPERAYTWY\nHLA-C08:31,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYEWY\nHLA-C08:32,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:33,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:34,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:35,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:37,YDAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:38,YYAGYREKYRQTDVSNLYL
RSNFYTWATLAYTWY\nHLA-C08:39,YYAGYREKYRQTDVSNLYLRYNFYTWAVLAYTWY\nHLA-C08:40,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:41,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYLWY\nHLA-C08:42,YYAGYREKYRQTDVSNLYLRYKFYTWATLAYTWY\nHLA-C08:43,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWC\nHLA-C08:44,YYAGYREKYRQTDVSNLYIRYNFYTWATLAYTWY\nHLA-C08:45,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:46,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:47,YYAGYREKYRQTDVSNLYLRYNFYTLAARAYTWY\nHLA-C08:48,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:49,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:50,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:51,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWH\nHLA-C08:53,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:54,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C08:56,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:57,YYAGYREKYRQTDVSNLYLRYNFYTWAALAYTWY\nHLA-C08:58,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:59,YYAGYREKYRQTDVSNLYLRYNSYTWATLAYTWY\nHLA-C08:60,YYAGYREKYRQTDVSNLYLKYNFYTWATLAYTWY\nHLA-C08:61,YYAGYREKYRQTDVSNLYIRYNFYTWATLAYTWY\nHLA-C08:62,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:63,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:65,YYAGYREKYRQTDVSNLYLRYNFYTWASLAYTWY\nHLA-C08:66,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C08:67,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:68,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:69,YYAGYQEKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:71,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:72,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:73,YYAGYREKYRQTDESNLYLRYNFYTWAERAYTWY\nHLA-C08:74,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:75,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:76,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:77,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:78,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:79,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:80,YYSGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:81,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:82,YYAGYREKYRQTDVSNLYIRYNFYTWATLAYTWY\nHLA-C08:83,YYAGYREKYRQTDVSNLYLRYDFYTWATLAYTWY\nHLA-C08:84,YYAGYREKYRQTDVSNLYLRYNFYTSATLAYTWY\nHLA-C08:85,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:86,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:87,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:90,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:91,YYAGYRQKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:92,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:93,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTSY\nHLA-C08:94,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C08:95,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:96,YYARYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:97,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C08:98,YYAGYREKYRQTDVSNLYLRYNFYTWATLGYTWY\nHLA-C08:99,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C1202,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C1203,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C1204,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C1205,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C1206,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C1207,YYAGYREKYRQADVGNLYLWYDSYTWAEWAYTWY\nHLA-C1208,YYAGYRENYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C1209,YYAGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C1210,YYAGYREKYRQADVSNLYLRFDSYTWAEWAYTWY\nHLA-C1211,YYAGYREKYRQADVSNLYLWSDSYTWAEWAYTWY\nHLA-C1212,YYAGYREKYRQADESNLYLWYDSYTWAEWAYTWY\nHLA-C1213,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C1214,YYAGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C1215,YYAGYREKYRQADVSNLYLWYDLYTWAEWAYTWY\nHLA-C1216,YDSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C1217,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:02,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:03,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:04,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C12:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C12:06,YYAGYREKYRQADVSNLY
LWYDSYTWAEWAYTWY\nHLA-C12:07,YYAGYREKYRQADVGNLYLWYDSYTWAEWAYTWY\nHLA-C12:08,YYAGYRENYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:09,YYAGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C12:10,YYAGYREKYRQADVSNLYLRFDSYTWAEWAYTWY\nHLA-C12:100,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:101,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:102,YYAGYREKYRQADVSNLYLWYDSYAWAEWAYTWY\nHLA-C12:103,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:106,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYLWY\nHLA-C12:107,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:108,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:109,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:11,YYAGYREKYRQADVSNLYLWSDSYTWAEWAYTWY\nHLA-C12:110,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:111,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:112,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:113,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:114,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:115,CYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:116,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:117,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:118,YYAGYREKYRQADVSNLYLRSDSYTWAEWAYTWY\nHLA-C12:119,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYEWY\nHLA-C12:12,YYAGYREKYRQADESNLYLWYDSYTWAEWAYTWY\nHLA-C12:120,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:121,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:122,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYLWY\nHLA-C12:123,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:124,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:125,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:126,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:127,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:128,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:129,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:13,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:130,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:131,YYAGYQEKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:132,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:133,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:134,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:135,YYAGYREKYRQADVSKLYLWYDSYTWAEWAYTWY\nHLA-C12:136,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:137,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:138,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:139,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:14,YYAGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C12:140,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:141,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:142,FYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:143,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:144,YYAGYREKYRQADVSNLYLWYNFYTWAEWAYTWY\nHLA-C12:145,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:146,YYAGYREKYRQADVNKLYLRYDSYTWAEWAYTWY\nHLA-C12:147,YDSGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:149,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:15,YYAGYREKYRQADVSNLYLWYDLYTWAEWAYTWY\nHLA-C12:150,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:151,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:152,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:153,YYAGYREKYRQADVSNLYLWYDSYTWAEWVYTWY\nHLA-C12:154,YYAGYREKYRQADVSKLYLWYDSYTWAEWAYTWY\nHLA-C12:156,YYAGYREKYRQADVSNLYLWFDSYTWAEWAYTWY\nHLA-C12:157,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:158,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYMWY\nHLA-C12:159,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:16,YDSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:160,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:161,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:162,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:163,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:164,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:165,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:166,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:167,YYAGYREKYRQADVSNLYLWYDSYTWAEWAY
TWY\nHLA-C12:168,YYAGYREKYRQADVSNLYLTYDSYTWAEWAYTWY\nHLA-C12:169,YYAGYREKYRQADVSNLYLRYDSYTWATLAYTWY\nHLA-C12:17,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:170,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:171,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:172,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:173,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:174,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:175,YYAGYREKYRQADVSNLYLWYDSYTWAVLAYTWY\nHLA-C12:176,YYAGYREKYRQADVSNLYLWYDSYTLAAWAYTWY\nHLA-C12:177,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:178,YYAGYREKYRQADVSNLYLWCDSYTWAEWAYTWY\nHLA-C12:179,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:18,YYAGYREKYRQADVSNLYLRYDSYTWAELAYTWY\nHLA-C12:180,YYAGYREKYRQADVSNLYLWYDSYTWAVWAYTWY\nHLA-C12:181,YYAGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C12:182,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:183,YYAGYREKYRQADVSNLYWTYDSYTWAEWAYTWY\nHLA-C12:184,YYAEYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:185,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:186,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:187,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:188,YYAGYRENYRQTDVNNLYLRYDSYTWAEWAYTWY\nHLA-C12:189,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:19,YYAGYREKYRQADVSNLYLWYDSYTWAECAYTWY\nHLA-C12:190,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:191,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:192,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:193,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:194,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:195,YDAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:196,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:197,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:198,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYEWY\nHLA-C12:199,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:20,YYAGYREKYRQADVSNLYLWYDSYTWAELAYTWY\nHLA-C12:200,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:201,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:202,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:203,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:204,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:205,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:206,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:207,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:208,YYAGYREKYRQADVSNLYLRYDLYTWAEWAYTWY\nHLA-C12:209,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYTWY\nHLA-C12:210,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:211,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:212,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:213,YYAGYREKYRQADVSNLYLWYDSYIWAEWAYTWY\nHLA-C12:214,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:215,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:216,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:217,YDAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:218,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:22,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:220,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:221,YYAGYREKYRQADVSNLYLRHDSYTWAEWAYTWY\nHLA-C12:222,YYAGYREKYRQADVSNLYLRYDYYTWAEWAYTWY\nHLA-C12:223,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:224,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:225,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:226,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:227,YYSGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:228,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:229,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:23,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:230,YYAGYREKYRQADVSNLCLWYDSYTWAEWAYTWY\nHLA-C12:231,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:233,YYAGYREKYRQADVSNLYLRFNFYTWAERAYTWY\nHLA-C12:234,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:235,YYAGYREKYRQADVSNLYLWYDYYTWAEWAYTWY\nHLA-C12:237,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C
12:238,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:239,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:24,YYAGYREKYRQADVSNLYLWYDSYTWAERAYTWY\nHLA-C12:240,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:241,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:242,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:243,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:244,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:245,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:246,YYAGYREKYRQADLSNLYLWYDSYTWAEWAYTWY\nHLA-C12:247,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:248,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:249,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:25,YYAGYPEKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:250,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:251,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:252,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:253,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:254,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:255,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:256,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:257,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:258,YYAGYREKYRQADVSNLYLGYDSYTWAEWAYTWY\nHLA-C12:259,YYAGYREKYRQADVSNLYLWYDSDTWAEWAYTWY\nHLA-C12:26,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:260,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:261,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:262,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:263,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:264,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:265,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:266,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:267,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:27,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:28,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:29,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:30,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:31,YYAGYREKYRQADVSNLYLWYNFYTWAEWAYTWY\nHLA-C12:32,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:33,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C12:34,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:35,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:36,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:37,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:38,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:40,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:41,YYAGYREKYRQADVNKLYLRYDSYTWAEWAYTWY\nHLA-C12:43,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:44,YYAGYREKYRQADVSNLYIRYDSYTWAEWAYTWY\nHLA-C12:45,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:47,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:48,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:49,YYAGYREKYRQTDVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:50,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:51,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:52,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:53,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:54,YYAGYREKYRQADVNNLYLWYDSYTWAEWAYTWY\nHLA-C12:55,YSAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:56,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:57,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:58,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:59,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:60,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C12:61,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:62,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:63,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:64,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:65,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:66,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:67,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:68,YYAGYLEKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:69,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:70,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:71,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C
12:72,YYAGYREKYRQADVSKLYLRYDSYTWAEWAYTWY\nHLA-C12:73,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:74,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:75,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:76,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:77,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:78,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:79,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:81,YYAGYRENYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:82,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:83,YYAGYREKYRQADVSNLYLRYDSYTWAELAYLWY\nHLA-C12:85,YYAGYREKYRQADVSNLYLRYDSYTWAERAYTWY\nHLA-C12:86,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:87,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWH\nHLA-C12:88,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:89,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:90,YYAGYREKYGQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:91,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:92,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:93,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:94,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:95,YYAGYWEKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:96,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C12:97,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:98,YYAGYREKYRQTDVSNLYLWYDSYTWAEWAYTWY\nHLA-C12:99,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C1402,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C1403,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C1404,YSAGYREKYRQADVNNLYLWFDSYTWAERAYTWY\nHLA-C1405,YSAGYREKYRQTDVSNLYLWYDSYTWAERAYTWY\nHLA-C1406,YSAGYREKYRQTDVSNLYLWFDSYTWAELAYTWY\nHLA-C1407,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:02,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:03,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:04,YSAGYREKYRQADVNNLYLWFDSYTWAERAYTWY\nHLA-C14:05,YSAGYREKYRQTDVSNLYLWYDSYTWAERAYTWY\nHLA-C14:06,YSAGYREKYRQTDVSNLYLWFDSYTWAELAYTWY\nHLA-C14:08,YSAGYREKYRQTDVSNLYPWFDSYTWAERAYTWY\nHLA-C14:09,YSAGYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-C14:10,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:100,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:101,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:102,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:103,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:104,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:11,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:12,YSAGYREKYRQTDVNKLYLWFDSYTWAERAYTWY\nHLA-C14:13,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:14,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:15,YSAGYREKYRQTDVSNLYLWFDSYTWAALAYTWY\nHLA-C14:16,YSAGYREKYRQTDVSNLYLWFDSYTWAEWAYTWY\nHLA-C14:17,YSAGYREKYRQTDVSNLYLWFDSYTLAARAYTWY\nHLA-C14:18,YSSGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:19,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:20,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYLWY\nHLA-C14:22,YSARYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:23,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:24,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:25,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYEWY\nHLA-C14:26,YSAGYREKYRQTDVSNLYLWFDFYTWAERAYTWY\nHLA-C14:27,YSARYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:28,YSAGYREKYRQTDVSNLYLRFDSYTWAERAYTWY\nHLA-C14:29,YSAGYREKYRQTDVSNLYLWFDSYTRAERAYTWY\nHLA-C14:30,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:31,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:32,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:33,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:34,CSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:36,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:37,YSAGYRKKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:38,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYMWY\nHLA-C14:39,YSAGYQEKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:40,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:41,CSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:42,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:43,YSAGYREKYRQTDVSNLYLWFDSYP
WAERAYTWY\nHLA-C14:44,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:45,YSAGYREKYRQTDVSNLYLWCDSYTWAERAYTWY\nHLA-C14:46,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:48,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:49,YSAGYREKYRQADVNKLYLWFDSYTWAERAYTWY\nHLA-C14:50,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:51,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:52,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:53,YSAGYREKYRQTDVSNLYLWFDSYTWAVLAYTWY\nHLA-C14:54,YSAGYREKYRQTDVSNLYIRYDYYTWAERAYTWY\nHLA-C14:55,YSAGYREKYRQTDVSNLYLWFDYYTWAERAYTWY\nHLA-C14:56,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:57,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:58,YFAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:59,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:60,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:61,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTSY\nHLA-C14:62,YSAGYREKYGQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:63,YSAGYREKYRQTDVSNLYLWFDSYTWAERTYTWY\nHLA-C14:64,YSAGYREKYRQADVNNLYLWFDSYTWAERAYTWY\nHLA-C14:65,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:66,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:67,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:68,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:69,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:70,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:71,YSAGYREKYRQTDVSNLYLWFDSYTWAVRAYTWY\nHLA-C14:72,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:73,YSAGYRENYRQADVSNLYLWFDSYTWAERAYTWY\nHLA-C14:74,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:75,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:76,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWH\nHLA-C14:77,YSAGYREKYRQADVNNLYLWFDSYTWAELAYTWY\nHLA-C14:78,YTAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:79,YSAGYREKYRQTDVSNLYLWFDDYTWAERAYTWY\nHLA-C14:80,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:81,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:82,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:83,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:84,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:85,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:86,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:87,YSAGYREKYRQTDVSNLYLWFDSYTWATLAYTWY\nHLA-C14:88,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:89,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:90,YSAGYREKYRQTDVSNLYLWFDSYTWAEQAYTWY\nHLA-C14:91,YSAGYREKYRQTDVSNLYLWFDSYTWAERAHTWY\nHLA-C14:92,YSAGYREKYRQTDVSNLYWTYNYYTWAERAYTWY\nHLA-C14:94,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:95,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:96,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C14:98,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C1502,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C1503,YYAGYRENYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C1504,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C1505,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C1506,YYAGYRENYRQTDVNKLYIRYDYYTWAELAYTWY\nHLA-C1507,YYAGYRENYRQTDVSNLYIRYDLYTWAELAYTWY\nHLA-C1508,YYAGYRENYRQTDVNKLYIRYDLYTWAERAYTWY\nHLA-C1509,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C1510,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C1511,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C1512,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C1513,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C1514,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C1515,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYLWY\nHLA-C1516,YYAGYREKYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C1517,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:02,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:03,YYAGYRENYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C15:04,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:05,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:06,YYAGYRENYRQTDVNKLYIRYDYYTWAELAYTWY\nHLA-C15:07,YYAGYRENYRQTDVSNLYIRYDLYTWAELAYTWY\nHLA-C15:08,YYAGYRENYRQTDVNKLYIRYDLYTWAERAYTWY\nHLA-C15:09,YYAGYRENYRQTDVNKLYIRYDSY
TWAELAYTWY\nHLA-C15:10,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:100,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:101,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:102,YYAGYRENYRQTDVNKLYLWCDYYTWAERAYTWY\nHLA-C15:103,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:104,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:106,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:107,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:108,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:109,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:11,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:110,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:111,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYEWY\nHLA-C15:112,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:113,YYSGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:114,YYARYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:116,YYAGYRENYRQTDVSKLYIRYDFYTWAELAYTWY\nHLA-C15:117,YYAGYRENYRQTDVNKLYIRYDFYTWAVLAYTWY\nHLA-C15:118,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:119,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:12,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:120,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:121,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:123,YYAGYRENYRQTDVNKLYISYDLYTWAELAYTWY\nHLA-C15:124,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:125,YYAGYRENYRQTDVNKLYLWYDFYTWAELAYTWY\nHLA-C15:126,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:127,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:128,YYAGYRENYRQTDVNKLYIRYNLYTWAELAYTWY\nHLA-C15:129,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:13,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:130,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWH\nHLA-C15:131,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:132,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:133,YYAGYRENYRQTDVNKLYIWYDLYTWAELAYTWY\nHLA-C15:134,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:135,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:136,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:137,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:138,YDAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:139,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:140,YYAGYRENYRQTDVNKLYISYDFYTWAELAYTWY\nHLA-C15:141,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:142,YYAGYRENYRQTDVNKLYIRYELYTWAELAYTWY\nHLA-C15:143,YYAGYRENYRQTDVNKLYIRYDSYTWAERAYTWY\nHLA-C15:144,YYAGYRENYRQTDVSNLYIRYDLYTWAELAYTWY\nHLA-C15:146,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:147,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:148,YYAGYRENYRQTDVNKLYIRYDFYTWAERAYTWY\nHLA-C15:149,YYAGYRENYRQTDVNKLYFRYDLYTWAELAYTWY\nHLA-C15:15,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYLWY\nHLA-C15:150,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:151,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:152,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:153,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:154,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:155,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:157,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:158,YDAGYRENYRQTDVNKLYIRYDYYTWAELAYTWY\nHLA-C15:159,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYMWY\nHLA-C15:16,YYAGYREKYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C15:161,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:162,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:163,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:165,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:166,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:167,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:168,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:169,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:17,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:170,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:171,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:172,YYAGYRENYRQTDVNKLYIRSDLYTWAELAYTWY\nHLA-C15:173,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY
\nHLA-C15:174,YYAGYRENYRQTDVNKLYIRHDLYTWAELAYTWY\nHLA-C15:175,YYAGYWENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:176,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:178,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:179,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:18,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:180,YYAGYRENYRQTDVNKLYIRYDYYTWADLAYTWY\nHLA-C15:181,YYAGYRENYRQTDVNNLYIRYDFYTWAELAYTWY\nHLA-C15:182,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:183,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:19,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:20,YYAGYREKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:21,YYAGYRENYRQTDVSKLYIRYDLYTWAELAYTWY\nHLA-C15:22,YYAGYRENYRQTDVNKLYLRYDFYTWAELAYTWY\nHLA-C15:23,YDAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:24,YYAGYRENYRQTDVNKLYIRYNYYTWAELAYTWY\nHLA-C15:25,YYAGYREKYRQADVSNLYIRYNFYTWAEDAYTSY\nHLA-C15:26,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:27,YYAGYRNKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:28,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:29,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:30,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:31,YYAGYRENYRQTDVNKLYIRYDLYTWAALAYTWY\nHLA-C15:33,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:34,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:35,YYAGYRENYRQTDVNKLHIRYDLYTWAELAYTWY\nHLA-C15:36,YSAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:37,YYAGYRENYRQTDVNKLYLWCDLYTWAELAYTWY\nHLA-C15:38,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:39,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWH\nHLA-C15:40,YYAGYRENYRQTDVNKLYIRYDYYTWAELAYTWY\nHLA-C15:41,YYAGYRENYRQTDVNKLYNRYDLYTWAELAYTWY\nHLA-C15:42,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYEWY\nHLA-C15:43,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYTWY\nHLA-C15:44,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:45,YYAGYRENYRQTDENKLYIRYDLYTWAELAYTWY\nHLA-C15:46,YYAGYRENYRQTDVNKLYIRSDFYTWAELAYTWY\nHLA-C15:47,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:48,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:49,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:50,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:51,YYAGYRENYRQTDVNKLYIRYDLYTWAELTYTWY\nHLA-C15:52,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:53,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:54,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:55,YYAGYRENYRQTDVNKLYLWYDYYTWAELAYTWY\nHLA-C15:56,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:57,YYAGYRENYRQTDVNKLYIRYDLYTWAVLAYTWY\nHLA-C15:58,YYAGYRENYRQTDVNKLYLWYDLYTWAELAYTWY\nHLA-C15:59,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:60,YYAGYRENYRQTDVNQLYIRYDLYTWAELAYTWY\nHLA-C15:61,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:62,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:63,YDSGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:64,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:65,YYAGYRENYRQTDVNKLYLRYDSYTLAALAYTWY\nHLA-C15:66,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C15:67,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:68,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:69,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:70,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:71,YYAGYRENYRQTDVNILYIRYDLYTWAELAYTWY\nHLA-C15:72,YYAGYRENYRQTDVNKLYLRYDLYTWAELAYTWY\nHLA-C15:73,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:74,YYAGYRENYRQTDVNKLYIRYDLYTWAEWAYTWY\nHLA-C15:75,YYAGYRENYRQTDVNKLYIRYDLYTWAAQAYTWY\nHLA-C15:76,YYAGYRENYRQTDVNKLYIRYNFYTWAELAYTWY\nHLA-C15:77,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYLWY\nHLA-C15:78,YYAGYRENYRQTDVNKLYIRFDLYTWAELAYTWY\nHLA-C15:79,YYAGYRENYRQTDVNKLYIRYDLYIWAELAYTWY\nHLA-C15:80,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:81,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:82,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:83,YYAGYRENYRQTNVNKLYIRYDLYTWAELAYTWY\nHLA-C15:85,YYAGYRENYRQTDVNNLYIRYDLYTWAELAYTWY\nHLA-C15:86,YYAGYREN
YRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:87,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:88,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:89,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:90,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C15:91,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:93,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:94,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:97,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:98,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C15:99,YYAEYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C1601,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C1602,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C1604,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C1606,YYAGYREKYRQTDVSNLYLRSDSYTWAAQAYTWY\nHLA-C1607,YYAGYREKYRQTDVSNLYLRYDSYTWAAQAYTWY\nHLA-C1608,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:01,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:02,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:04,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:06,YYAGYREKYRQTDVSNLYLRSDSYTWAAQAYTWY\nHLA-C16:07,YYAGYREKYRQTDVSNLYLRYDSYTWAAQAYTWY\nHLA-C16:08,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:09,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:10,YYAGYREKYRQTDVSNLYLWYDDYTWAAQAYTWY\nHLA-C16:100,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:101,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:102,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:103,YYAGYREKYRQTDVNKLYLWYDSYTWAAQVYTWY\nHLA-C16:104,YYAGYREKYRQTDVNKLYLWFDSYTWAAQAYTWY\nHLA-C16:105,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:106,YYAGYREKYRQTDVRNLYLWYDSYTWAAQAYTWY\nHLA-C16:107,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:108,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:109,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:11,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:110,YYAGYREKYRQTDVSNLYLWYDSYTWAELAYTWY\nHLA-C16:111,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:112,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:113,YYAGYREKYRQTDVSNLYLWYDSYTWAEQAYTWY\nHLA-C16:114,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:115,YYAGYREKYRQTDVNKLYLWSDSYTWAAQAYTWY\nHLA-C16:116,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:117,YYAGYREKYRQTDVSNLYLRFDSYTWAAQAYTWY\nHLA-C16:118,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:119,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:12,YYAGYGEKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:120,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:121,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:122,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:124,YYAGYREKYRQTDVSNLYLWHDSYTWAAWAYTWY\nHLA-C16:125,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:126,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:127,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:128,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:129,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:13,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:130,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:131,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:133,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:134,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:135,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:136,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:137,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:138,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:139,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:14,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:140,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:141,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:142,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:143,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:144,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:145,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:146,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:15,YYAGYREKYRQADVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:17
,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:18,YYAGYREKYRQTDVSNLYLWCDSYTWAAQAYTWY\nHLA-C16:19,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:20,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:21,YDAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:22,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:23,YYAGYREKYRQTDVSNLYLWFDSYTWAAQAYTWY\nHLA-C16:24,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:25,YYAGYREKYRQADVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:26,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:27,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:28,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:29,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYEWY\nHLA-C16:31,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:32,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:33,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYAWY\nHLA-C16:34,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:35,YYAGYREKYRQTDVSNLYLWYDSYTWAALAYTWY\nHLA-C16:36,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:37,YYAGYREKYRQTDVSKLYLWYDSYTWAAQAYTWY\nHLA-C16:38,YYARYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:39,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:40,YYAGYREKYRQTDVSNLYLWYDSYTWAELAYTWY\nHLA-C16:41,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWC\nHLA-C16:42,YYAGYQEKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:43,YYAGYREKYRQTDVSNLYLWYDSYTWAVQAYTWY\nHLA-C16:44,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:45,YYAGYREKYRQTDVSNLYLWYNFYTWAAQAYTWY\nHLA-C16:46,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:47,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:48,YYAGYREKYRQTDVNKLYLWYDSYTWAALAYTWY\nHLA-C16:49,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:50,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYEWY\nHLA-C16:51,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:52,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:53,YYAGYREKYRQTDVSNLYLWYDSYTWAERAYTWY\nHLA-C16:54,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:55,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:56,YYAGYQEKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:57,YYAGYREKYRQTDVNNLYLWYDSYTWAAQAYTWY\nHLA-C16:58,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:59,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:60,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:61,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:62,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:63,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:64,YYAGYRENYRQADVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:65,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:66,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:67,YYAGYREKYRQTDVSNLYLWYRDYTWAAQAYTWY\nHLA-C16:68,YYAGYREKYRQTDVSNLYLWYDSYTWAARAYTWY\nHLA-C16:69,YYAGYREKYRQTGVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:70,YYAGYRENYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:71,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:72,YYAGYREKYRQTDVSNLYLWYDSYTWAPQAYTWY\nHLA-C16:73,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:74,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:75,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:76,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:78,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:79,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:80,YDSGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:81,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:82,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C16:83,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:84,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:85,YYTKYREISTNTYENTAYLWYDSYTWAAQAYTWY\nHLA-C16:86,YHAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:87,YYAGYRENYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:88,YYAGYREKYRQTDVNKLYLWYDSYTWAARAYTWY\nHLA-C16:90,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C16:91,YYAGYREKYRQTDVNKLYLWYDSYTWAAWAYTWY\nHLA-C16:92,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:93,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAHTWY\nHLA-C16:94,YYAGYREKYRQTDVSNMYLWYDSYTWAAQAYTWY\nHLA-C16:95,YYAGYREKCRQTDVSNLYLWYDSYTWAAQ
AYTWY\nHLA-C16:96,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:97,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:98,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C16:99,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C1701,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C1702,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C1703,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C1704,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:01,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:02,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:03,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:04,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:05,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:06,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:07,YYAGYREKYRQADVNKLYIRYNFYSLAELAYLWY\nHLA-C17:08,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:09,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:10,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:11,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:12,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:13,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:14,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:15,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:16,YYAGYREKYRQADVNKLYLRYNFYSLAELAYEWY\nHLA-C17:17,YYAGYREKYRQADVNKLYIRYNFYSLAERAYTWY\nHLA-C17:18,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:19,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:20,YDAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:21,YYAGYREKYRQADVNKLYLWYNFYSLAELAYEWY\nHLA-C17:22,YYAGYREKYRQADVSNLYIRYNFYSLAELAYEWY\nHLA-C17:23,YYAGYREKYRQADVNKLYIRYNFYSLAELAYTWY\nHLA-C17:24,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:25,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:26,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:28,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:29,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:30,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:31,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:32,YYAGYREKYRQADVNKLYIRYNFYSWAELAYEWY\nHLA-C17:33,YYAGYREKYRQADVNKLYIRYNFYSLAELAYTWY\nHLA-C17:34,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:35,YYAGYREKYRQADVNKLYIRYNFYTWAELAYEWY\nHLA-C17:36,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:37,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:38,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:39,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C17:40,YYAGYREKYRQTDVNKLYIRYNFYSLAELAYEWY\nHLA-C17:41,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C1801,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C1802,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:01,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:02,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:03,YDSGYREKYRQADVNKLYLRFNFYTWAEWAYEWY\nHLA-C18:04,YDAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:05,YDSGYRENYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:06,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:08,YDSGYREKYRQADVNKLYLRFNFYTWAERAYLWY\nHLA-C18:09,YYSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:10,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:11,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C18:12,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-E0101,YHSMYRESADTIFVNTLYLWHEFYSSAEQAYTWY\nHLA-E0103,YHSMYRESADTIFVNTLYLWHEFYSSAEQAYTWY\nHLA-E01:01,YHSMYRESADTIFVNTLYLWHEFYSSAEQAYTWY\nHLA-E01:03,YHSMYRESADTIFVNTLYLWHEFYSSAEQAYTWY\nHLA-G0101,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G0102,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G0103,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G0104,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G0106,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G0107,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G0108,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G0109,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAHTWY\nHLA-G01:01,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G01:02,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G01:03,YSAMYEENTAHTDMNTLYLWIEYYSCAV
RAYTWY\nHLA-G01:04,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G01:06,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G01:07,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G01:08,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAYTWY\nHLA-G01:09,YSAMYEENTAHTDMNTLYLWIEYYSCAVRAHTWY\nMamu-A01,YYAMYRENMTENAVNTLYLRVEYYTWAVMAYQWY\nMamu-A02,YYAMYRENMAENAVNNLYIRYHSYTWAEHTYEWY\nMamu-A03,YYSEYRNIYAENAVANLYFRVEYYTWAEIAYEWY\nMamu-A04,YYAMYREIMTENAVANLYYVYEFYTWAVHTYEWY\nMamu-A0505,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWY\nMamu-A0506,YYSEYREICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A0507,YYSEYQNICADTLENTLYLTYDSYTWAAWAYQSY\nMamu-A0509,YYSEYQNICADTLENTLYLTYDSYTWAVWAYQSY\nMamu-A0510,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A0511,YYSEYQNICADTLENTLYITYDSYTWAVWAYESH\nMamu-A06,YYSEYQNICADTYESNLYLRYEYYTWAEIAYEWY\nMamu-A0602,YYSEYRNIYANTYESNLYLRYEYYTWAEIAYEWY\nMamu-A07,YYSEYRNICANTYESNLYIRYEFYTWAAMAYEWH\nMamu-A07,YYSEYRNICANTYESNLYIRYEFYTWAAMAYEWH\nMamu-A0703,YYSEYRNICANTYESNLYIRYDSYTWAAMAYEWH\nMamu-A1001:01,YYSMYREKMTETYGNTLYITYEYYTWAVWAYEWY\nMamu-A1002:01,YYAMYRENMAENAVNNLYIRYHSYTWAEHTYEWY\nMamu-A1011:01,YHTKYREISANTYENTAYFTYDYYTWAVHTYEWY\nMamu-A11,YHTKYREISANTYENTAYFTYDYYTWAVHTYEWY\nMamu-A11,YHTKYREISANTYENTAYFTYDYYTWAVHTYEWY\nMamu-A1305,YYAMYRENMTANAVANLYFRVEYYTWAVMAYEWY\nMamu-A1306,YYAMYRENMTANAVANLYFRYEYYTWAVMAYEWY\nMamu-A1602,YYAMYRNYIAENAVNTLYIRYEFYTWAVLAYEWH\nMamu-A19,YSSEYRNICAETYESNLYFNYEFYTWAAHTYRSY\nMamu-A1:00101,YYAMYRENMTENAVNTLYLRVEYYTWAVMAYQWY\nMamu-A1:00102,YYAMYRENMTENAVNTLYVRVEYYTWAVMAYQWY\nMamu-A1:00103,YYAMYRENMTENAVNTLYLRVEYYTWAVMAYEWY\nMamu-A1:00104,YYAMYRENMTENAVNTLYLRVEYYTWAVMAYEWY\nMamu-A1:00105,YYAMYRENMAENAVNNLYLRVESYTWAVMAYKSY\nMamu-A1:00201,YYAMYRENMAENAVNNLYIRYHSYTWAEHTYEWY\nMamu-A1:00301,YYSEYRNIYAENAVANLYFRVEYYTWAEIAYEWY\nMamu-A1:00302,YYSEYRNIYAENAVNNLYFRYEYYTWAEMAYEWY\nMamu-A1:00303,YSSEYRNICAANAVANLYYRYDFYTWAAMAYLWY\nMamu-A1:00304,YYSEYRNICAENAVANLYFRVEYYTWAEIAYEWY\nMamu-A1:00305,YYSEYRNIYAENAVANLYYRYYSYTWAVIAYEWY\nMamu-A1:00306,YYSEYRNIYAENAVANLYFRVEYYTWAVMAYRSY\nMamu-A1:00307,YYSEYRNIYAENAVANLYFRYEYYTWAVMAYRSY\nMamu-A1:00308,YYSEYRNIYAENAVANLYYRYYDYTWAWIAYEWY\nMamu-A1:00310,YYAEYRNIYAENAVANLYYRYYDYTWAWIAYEWY\nMamu-A1:00401,YYAMYREIMTENAVANLYYVYEFYTWAVHTYEWY\nMamu-A1:00402,YYAMYREIMTENAVANLYYVYEFYTWAAHTYEWY\nMamu-A1:00403,YYAMYRENMTERAVATLYYVYEFYTWAVWAYQWY\nMamu-A1:00601,YYSEYQNICADTYESNLYLRYEYYTWAEIAYEWY\nMamu-A1:00602,YYSEYRNIYANTYESNLYLRYEYYTWAEIAYEWY\nMamu-A1:00701,YYSEYRNICANTYESNLYIRYEFYTWAAMAYEWH\nMamu-A1:00702,YYSEYRNICADTYESNLYIRYEFYTWAAMAYEWH\nMamu-A1:00703,YYSEYRNICANTYESNLYIRYDSYTWAAMAYEWH\nMamu-A1:00704,YYSEYRNIAADTYENTLYIRYEYYTWAAMAYEWH\nMamu-A1:00705,YYSEYRNICADTYENTLYIRYESYTWAAMAYEWH\nMamu-A1:00801,YYSEYRNIYAANYEGNLYYTYDSYTWAEFTYEWY\nMamu-A1:01001,YYSMYREKMTETYGNTLYITYEYYTWAVWAYEWY\nMamu-A1:01002,YYSMYREKMTEIYGNNLYITYEFYTWAAWAYEWY\nMamu-A1:01101,YHTKYREISANTYENTAYFTYDYYTWAVHTYEWY\nMamu-A1:01102,YYTKYREISANTYENTAYFKYEFYTWAAMAYQSY\nMamu-A1:01103,YHTKYREISANTYENTAYFTYDYYTWAVHAYEWY\nMamu-A1:01104,YHTKYREISANTYENTAYFTYDYYTWAVHTYEWY\nMamu-A1:01201,YYAMYRENMTATYESNLYFRYSYYTWAEFTYRWY\nMamu-A1:01601,YYAMYRNYIAENAVNTLYIRYEFYTWAVLAYEWH\nMamu-A1:01801,YYAMYEEISANTYESNLYFRFSHYTWAWIAYEWY\nMamu-A1:01802,YYSEYEEISANTYESNLYFRYSHYTWAWFAYEWY\nMamu-A1:01803,YYATYEEISANTYESNLYFRYSYYTWAAYIYQWY\nMamu-A1:01804,YYATYEQIAANTYESNLYFRYSYYTWAAYIYQWY\nMamu-A1:01805,YYAMYEEISANTYESNLYFRFSHYTWAWIAYEWY\nMamu-A1:01806,YYATYEEISANTYESNLYFRYSYYTWAAYIYQWY\nMamu-A1:01807,YYSEYEEISANTYESNLYFRYSHYTWAWFAYEWY\nMamu-A1:01808,YYSMYEEISANTYESNLYFRYSHYTWAWIAYEWY\nMamu-A1:01901,YSSEYRNICAETYESNLYFNYEFYTWAAHTYRSY\nMamu-A1:01902,YSSEYRNICAETYESNLYFNYSYYTWAAHTY
EWY\nMamu-A1:01903,YSSEYQNICADTLESNLYVKYSYYTWAVWAYEWY\nMamu-A1:01904,YSSEYRNICAENAVANLYFNYEFYTWAAHTYEWY\nMamu-A1:01905,YSSEYRNICAETYESNLYFNYDYYTWAAHTYEWY\nMamu-A1:01906,YSSEYRNICAENAVANLYFNYEFYTWAAHTYEWY\nMamu-A1:01907,YSSEYRNICAETYESNLYFNYSYYTWAAHTYEWY\nMamu-A1:02201,YYSEYRNIYAETYESNLYLRYDSYTWAARAYEWY\nMamu-A1:02202,YYSEYRNIYAETYESNLYLRYDSYTWAARAYEWY\nMamu-A1:02203,YYSEYRNIYANTYESNLYLTYDSYTWAARAYEWY\nMamu-A1:02301,YYSMYREIMAENAVANLYFRYNFYTWAERAYRWY\nMamu-A1:02302,YYSMYREIMAENAVANLYFRYNFYTWAERAYRWY\nMamu-A1:02501,YYAMYRENMTETYGNTLYIKYEYYTWAVHTYRWY\nMamu-A1:02502,YYAMYRENMTENAVDNLYIKYEYYTWAVHTYRWY\nMamu-A1:02601,YYAMYSQIMADSYESNLYIRLHHYTWAAWAYEWY\nMamu-A1:02602,YYAMYSQIMADSYESNLYIRLHHYTWAAWAYPWY\nMamu-A1:02603,YYAMYSQIMADSYESNLYLKYHHYTWAAWAYPWY\nMamu-A1:02801,YYAMYRENMTATYENTLYFRYEYYTWAVDAYEWY\nMamu-A1:02802,YYAMYRENMTATYENTAYVKYEYYTWAAMAYEWY\nMamu-A1:02803,YYAMYRENMTATYENTAYFRYEYYTWAVDAYEWY\nMamu-A1:02804,YYAMYREIMTATYENTAYVKYEYYTWAAMAYEWY\nMamu-A1:02805,YYAMYREIMTATYENTAYFKYEFYTWAADVYRSY\nMamu-A1:02806,YYAMYRNIMTATYENTAYFRVEYYTWAAMAYEWY\nMamu-A1:03201,YHTKYEEISANTYESNLYYRYSYYTWAVFTYRWY\nMamu-A1:03202,YHTKYEEISANTYESNLYYRYSYYTWAVFTYRWY\nMamu-A1:03203,YHTKYEEISANTYESNLYFRFSYYTWAEFTYRWY\nMamu-A1:03301,YYAMYSQIMADTYESNLYIWYHHYTWAVWAYRWY\nMamu-A1:04001,YYAMYRSIMAENAVANLYITYDSYTWAVDVYKWH\nMamu-A1:04002,YYAMYRSIMAGNAVANLYITYDSYTWAVDVYKWH\nMamu-A1:04003,YYAMYRSIMAENAVANLYITYDSYTWAVDVYKWH\nMamu-A1:04101,YYAMYSQIMTATYESNLYVTYESYTWAWMAYEWY\nMamu-A1:04102,YYAMYSQNMTATYESNLYVTYVYYTWAVMAYEWY\nMamu-A1:04201,YYAMYRNIMAENAVANLYYMYEFYTWEWDTYEWY\nMamu-A1:04301,YYAMYRNIMTANAVANLYFRYSYYTWAVFTYRSY\nMamu-A1:04501,YYAMYSQIMADTYVNTLYVRYEFYTWAADVYEWY\nMamu-A1:04801,YYAMYRNIMAEDAADTLYLKYYYYTWEWDTYLSY\nMamu-A1:04901,YYAMYRSYMTAMAVDNLYLTYFDYTWAVLAYTWH\nMamu-A1:04902,YYAMYRSYMTAMAVDNLYLTYFDYTWAVLAYTWH\nMamu-A1:04903,YYAMYRSYMTAMAVDNLYLTYFDYTWAVLAYTWH\nMamu-A1:04904,YYAMYRSYMTAMAVDNLYLTYFDYTWAVLAYTWH\nMamu-A1:05001,YYAEYREISANTYESNLYITYEFYTWAEQAYRSY\nMamu-A1:05101,YYAEYRNIYAENYEGILYIKYEFYTWAAHTYEWY\nMamu-A1:05201,YYAEYQNSYADNYEGTLYLTYDFYTWAELTYRSY\nMamu-A1:05301,YYSEYQNIYAENYEGNLYLTYESYTWAEWTYRSY\nMamu-A1:05302,YYSEYQNIYAENYEGNLYLTYESYTWAEWTYRSY\nMamu-A1:05401,YYSEYQNICAATYEGILYYRYSYYTWAVFTYLSY\nMamu-A1:05402,YYSEYQNIYAATYEGILYYRYSYYTWAVFTYLSY\nMamu-A1:05501,YHTKYREISANTYESILYYRYEFYTWAVIAYEWY\nMamu-A1:05601,YHTKYREISTNTYEGILYFRFSYYTWAEFTYRWY\nMamu-A1:05602,YHTKYREISTNTYEGILYYRYSYYTWAVFTYLWY\nMamu-A1:05603,YHTKYREISTNTYENTAYFRFSYYTWAEFTYRWY\nMamu-A1:05701,YYAMYSQIMADTYESNLYVRYHHYTWAVDVYRSY\nMamu-A1:05702,YYAMYSQIMADTYENTAYVRYHHYTWAVDVYRSY\nMamu-A1:05901,YYAMYRENMTATYVNTLYIWHNHYTWAVFAYEWH\nMamu-A1:06001,YYAMYRSYMTANAVNNLYIRVDHYTWAVMAYTWH\nMamu-A1:06101,YYAMYRENMTANAVNNLYIRYESYTWAWMVYEWY\nMamu-A1:06301,YYAEYQNIYAENYEGILYLTYEFYTWAEQAYRSY\nMamu-A1:06501,YYAMYQEKADTNYVNTAYIKYDSYTWAWMAYEWY\nMamu-A1:06601,YHTKYEEISANNYENTLYIKYDDYTWAVHAYESY\nMamu-A1:07301,YYAMYEEIMADTYENTAYFTYDSYTWAVDVYEWY\nMamu-A1:07401,YYALYRNICAENYENTLYIKYEFYTWAVQTYTWH\nMamu-A1:07402,YYALYRNICAETYENTAYIKYEFYTWAVHTYTWH\nMamu-A1:07403,YYALYRNICAETYENTAYIKYEFYTWAVHTYTWH\nMamu-A1:08101,YYAEYREKMTATYVNNLYVTYEYYTWAVDTYEWY\nMamu-A1:08501,YYAEYRNIYAENYESNLYVTYEYYTWAVDTYEWY\nMamu-A1:09101,YYSEYREISAETLENTLYIKYDSYTWAAMAYEWY\nMamu-A1:09201,YYAMYRNICADTYESNLYIRYHHYTWAVWAYRSY\nMamu-A1:10501,YYSEYRNIYAANYEGNLYFRYEYYTWAAMAYTWY\nMamu-A1:10502,YYSEYRNICAANYEGNLYFRYEYYTWAAMAYTWY\nMamu-A1:10503,YYSEYRNICAANYEGNLYFRYEFYTWAAMAYTWY\nMamu-A1:10504,YHSEYRNICAANYEGNLYFRYEYYTWAAMAYTWY\nMamu-A1:10601,YYSMYREIMAENAVANLYLTYDFYTWAELTYRSY\nMamu-A1:10701,YYSMYEEISANTYGNTLYITYEFYTWAVDVYRSY\n
Mamu-A1:10801,YYAMYRENMTATYENTLYIWYDSYTWAWIAYEWY\nMamu-A1:10901,YYSEYREISAETYEGILYYTYDYYTWAVMAYRWY\nMamu-A1:11001,YTAMYREKMTETSGNTLYIRVEFYTWAVMAYTWY\nMamu-A1:11101,YYSMYREKMTETSGNTLYIRVEFYTWAVMAYTWY\nMamu-A1:11201,YYSMYRNIMTANAVANLYIRYSHYTWAWFAYEWY\nMamu-A1:11301,YYSMYREICADTLENTLYITYDSYTWAVDVYRSY\nMamu-A20101,YYSEYEQRVGHTFVSNLYIRYESYTWAVHTYESY\nMamu-A20102,YYSEYEQRVGHTFVSNLYIRYESYTWAVHTYESY\nMamu-A21,YYSEYRNIYAENAVANLYFRVEYYTWAVMAYRSY\nMamu-A2201,YYSEYRNIYAETYESNLYLRYDSYTWAARAYEWY\nMamu-A23,YYSMYREIMAENAVANLYFRYNFYTWAERAYRWY\nMamu-A24,YYSEYRNISADTLADTLYITYYYYTWAEFAYEWY\nMamu-A25,YYAMYRENMTETYGNTLYIKYEYYTWAVHTYRWY\nMamu-A26,YYAMYSQIMADSYESNLYIRLHHYTWAAWAYEWY\nMamu-A2601,YYAMYSQIMADSYESNLYIRLHHYTWAAWAYEWY\nMamu-A28,YYAMYRENMTATYENTLYFRYEYYTWAVDAYEWY\nMamu-A2:0101,YYSEYEQRVGHTFVSNLYIRYESYTWAVHTYESY\nMamu-A2:0102,YYSEYEQRVGHTFVSNLYIRYESYTWAVHTYESY\nMamu-A2:0103,YYSEYEQRVGHTFVSNLYIRYESYTWAVHTYEWY\nMamu-A2:0501,YYSEYQNICADTLENTLYITYDSYTWAVWAYESH\nMamu-A2:05020,YYSEYQNICADTLENTLYLTYDSYTWAVWAYQSY\nMamu-A2:05030,YYSEYQNICADTLENTLYLTYDSYTWAVWAYQSY\nMamu-A2:05040,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0505,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWY\nMamu-A2:0506,YYSEYREICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0507,YYSEYQNICADTLENTLYLTYDSYTWAAWAYQSY\nMamu-A2:0509,YYSEYQNICADTLENTLYLTYDSYTWAVWAYQSY\nMamu-A2:0510,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0511,YYSEYQNICADTLENTLYITYDSYTWAVWAYESH\nMamu-A2:0512,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0513,YYSEYREICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0514,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:05150,YYSEYREICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:05160,YYSEYQNICADTLENTLYLTYDSYTWAVHTYQSY\nMamu-A2:0517,YYSEYQNICADTLENTLYLTYDSYTWAVHAYQSY\nMamu-A2:0518,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0519,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0520,YYSEYQNICADTLENTLYLTYDSYTWAVWAYESY\nMamu-A2:0521,YYSEYQNICADTLENTLYLTYDSYTWAVWAYESY\nMamu-A2:0522,YYSEYQNICADTLENTLYLTYDSYTWAVWAYESY\nMamu-A2:0523,YYSEYREICADTLENTLYITYDSYTWAVRAYEWH\nMamu-A2:0524,YYSEYQNICADTLENTLYITYDSYTWAVWTYEWH\nMamu-A2:0525,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0526,YYSEYQNIAANTFENTAYITYDSYTWAAHTYEWY\nMamu-A2:0527,YYSEYQNICADTLENTLYLTYDSYTWAVWVYQSY\nMamu-A2:0528,YYSEYQNICADTLENTLYITYDSYTWAVWAYESH\nMamu-A2:0529,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0531,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:05320,YYSEYQNICADTLENTLYITYDSYTWAVWAYESH\nMamu-A2:0533,YYSEYQNICADTLENTLYLTYDSYTWAAHTYQSY\nMamu-A2:0534,YYSEYQNICADTLENTLYITYDSYTWAAHTYEWH\nMamu-A2:0535,YYSEYQNICADTLENTLYITYDSYTWAVMAYEWH\nMamu-A2:0536,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0537,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0538,YYSEYQNICADTLENTLYLTYDSYTWAVWAYESY\nMamu-A2:0539,YYSEYQNICADTLENTLYLTYDSYTWAVHAYQSY\nMamu-A2:0540,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0541,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWY\nMamu-A2:0542,YYSEYREICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0543,YYSEYQNICADTLENTLYITYDSYTWAVWAYEWH\nMamu-A2:0544,YYSEYREICADTLEDTLYITYDSYTWAVWAYESY\nMamu-A2:0545,YYAEYQNICADTLENTLYITYDSYTWAVWAYESH\nMamu-A2:0546,YYSEYREICADTLENTLYITYDSYTWAVWAYESY\nMamu-A2:0547,YYSEYREICADTLENTLYITYEFYTWAVDVYRSY\nMamu-A2:2401,YYSEYRNISADTLADTLYITYYYYTWAEFAYEWY\nMamu-A2:2402,YYSEYREISAETYEDTLYITYYDYTWAEFAYEWH\nMamu-A2:2403,YYSEYRNISADTYENTLYITYYYYTWAEFAYEWY\nMamu-A3:1301,YYAMYRENMTANAVANLYFRYEFYTWAVMAYEWY\nMamu-A3:1302,YYAMYRENMTANAVANLYFRVEYYTWAVMAYRSY\nMamu-A3:1303,YYAMYRENMTENAVANLYFRVEYYTWAVMAYEWY\nMamu-A3:1304,YYAMYRENMTANAVANLYFRVEYYTWAVMAYRSY\nMamu-A3:1305,YYAMYRENMTANAVANLYFRVEYYTWAVMAYEWY\nMamu-A3:1306,YYAMYRENMTANAVANLYFRYEYYTWAVMAYE
WY\nMamu-A3:1307,YYAMYRENMTANAVANLYFRVEFYTWAVMTYRSY\nMamu-A3:1308,YYAMYRENMTANAVANLYFRVEYYTWAVMAYEWY\nMamu-A3:1309,YYAMYRENMTANAVANLYVKYEYYTWAVMAYEWY\nMamu-A3:1310,YYAMYRENMTANAVANLYFRVEYYTWAVMAYEWY\nMamu-A3:1311,YYAMYRENMTENAVANLYFRVEYYTWAVMAYEWY\nMamu-A3:1312,YYAMYRENMTANAVANLYFRVEYYTWAVMAYEWY\nMamu-A3:1313,YYSMYRENMTANAVANLYFRVEFYTWAVMAYRSY\nMamu-A4:0101,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:01020,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:0103,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:0202,YYTMYRENMTANAVANLYIRVEYYTWAVMAYTWH\nMamu-A4:0203,YYTMYRENMTANAVANLYFRVEFYTWAVMAYTWH\nMamu-A4:0205,YYTMYRENMTANAVANLYFRVEYYTWAVMAYTWH\nMamu-A4:0301,YYAMYRENMTANAVANLYFTYEYYTWAAMAYTWH\nMamu-A4:0302,YYAMYRENMTANAVANLYFRVEYYTWAAMAYTWH\nMamu-A4:1402,YYAMYRENMTENAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:14030,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:1404,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRSY\nMamu-A4:1405,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:1406,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:1407,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:1408,YYAMYLENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A4:1409,YYAMYRENMTANAVANLYFRVEFYTWAVDVYRWY\nMamu-A5:30010,YYAMYEEIMAEDAVDTLYFVYGFYTWSRHAYEWY\nMamu-A5:3002,YYAMYEEIMAEDAVDTLYFVYEFYTWSRHTYEWY\nMamu-A5:3003,YYAMYEEIMAEDAVDTLYFVYGFYTWSRHAYEWY\nMamu-A5:3004,YYAMYEEIMAEDAVDTLYFVYGFYTWSRHAYEWY\nMamu-A5:3005,YYAMYEEIMAEDAVDTLYFVYGYYTWSRHAYEWY\nMamu-A5:3006,YYAMYEEIMAEDAVDTLYFVYGFYTWSRHAYEWY\nMamu-A6:0101,YHAMYREKMTEMAVANLYIVYSYYTLAAHAYLSY\nMamu-A6:0102,YHAMYREKMTEMAVANLYIVYSYYTLAAHAYLSY\nMamu-A6:0103,YHAMYREKMTEMAVANLYIVYSYYTLAAHAYLSY\nMamu-A6:0104,YHAMYREKMTEMAVANLYIVYSYYTLAAMAYLSY\nMamu-A6:0105,YHAMYREKMTEMAVANLYIVYSYYTLAAMAYLSY\nMamu-A70103,YYAMYREIMTATYGNTAYFKYEFYTWAAHTYEWY\nMamu-A7:0101,YYAMYRENMTATYGNTAYFTYEFYTWAAWAYEWY\nMamu-A7:0102,YYAMYRENMTATYENTAYFTYEFYTWAAWAYEWY\nMamu-A7:0103,YYAMYREIMTATYGNTAYFKYEFYTWAAHTYEWY\nMamu-A7:0201,YTSEYRNICAATYENTLYLKYEFYTWAAHTYEWY\nMamu-AG:01,YYAMYRENMTPTYVNTLYIKYEFYTWAAHTYEWY\nMamu-AG:02011,YYAMYRENMTATYENTLYIKYEFYTWAAHTYEWY\nMamu-AG:02012,YYAMYRENMTATYENTLYIKYEFYTWAAHTYEWY\nMamu-AG:0202,YYAMYRENMTATYENTLYIKYEFYTWAAHTYEWY\nMamu-AG:03011,YYAMYRENMTATYANTLYFKYEFYTWAAHTYEWY\nMamu-AG:0302,YYAMYRENMTATYANTLYFKYEFYTWAAHTYEWY\nMamu-B001:01,YHSMYREKAGNTDENIAYLMHYRYTWAVRAYRWY\nMamu-B003:01,YSSEYEENAGHTDADNLYLTYHYYTWAEVAYTWY\nMamu-B004:01,YSEMYEERAGNTFVGNLYYWYDFYTWAEQAYTWY\nMamu-B008:01,YSSEYEERAGHTDADTLYLTYHYYTWAEVAYTWY\nMamu-B01,YHSMYREKAGNTDENIAYLMHYRYTWAVRAYRWY\nMamu-B01,YHSMYREKAGNTDENIAYLMHYRYTWAVRAYRWY\nMamu-B017:04,YYSEYEARAEATHENTAYIKYHSYTWNYFAYEWY\nMamu-B02,YSEMYRNNSVTTFVGNLYLWYHFYTWAEMAYTWH\nMamu-B03,YSSEYEENAGHTDADNLYLTYHYYTWAEVAYTWY\nMamu-B03,YSSEYEENAGHTDADNLYLTYHYYTWAEVAYTWY\nMamu-B03901,YTELYEERAETTFVSTAYIWYDYYTWAEMAYRWY\nMamu-B04,YSEMYEERAGNTFVGNLYYWYDFYTWAEQAYTWY\nMamu-B04,YSEMYEERAGNTFVGNLYYWYDFYTWAEQAYTWY\nMamu-B05,YYAMYEQRVGHTFVSNLYLRSDYYTWAVLAYEWY\nMamu-B065:02,YSEMYEQRAGHTEGNTAYLWYDLYTWAEWAYEWY\nMamu-B07,YSEMYEARAGNTHGNTAYLWYEFYTWAALAYKWY\nMamu-B08,YSSEYEERAGHTDADTLYLTYHYYTWAEVAYTWY\nMamu-B08,YSSEYEERAGHTDADTLYLTYHYYTWAEVAYTWY\nMamu-B1001,YTEMYEQNSANTHVDTAYLTYHYYTWAERAYRWY\nMamu-B12,YSEMYEQNAANTDVSNLYVIYHFYTWDYFAYRWY\nMamu-B17,YYSEYEARAEATHENTAYIKYHSYTWNYFAYEWY\nMamu-B17,YYSEYEARAEATHENTAYIKYHSYTWNYFAYEWY\nMamu-B19,YTEMYRNRAGNTEGNTLYVRYHSYTWAEMAYEWH\nMamu-B20,YSEMYRNRAGHTHGNTLYVISHHYTWAEWAYGWY\nMamu-B21,YHSMYRNISAHTHGNTLYITYNHYTWDYFADASY\nMamu-B22,YTEMYRNRAANTDGNTAYIWYHFYTWAEQAYEWH\nMamu-B24,YSSMYRERAGNTFVSNLYLWYDSYTWAVQAYTWH\nMamu-B27,YHSEYEQNAAHSHVSNLYLKYDYYTWAALAYTWY\nMamu-B28,YGYMYDVRAARTDVDTLYIIYRDYTWAVMAYEWY\nMamu-B300
2,YSSEYEQNTAHNHVCTVYLRFNYYTWTYFAYTSH\nMamu-B36,YYAMYQENMTATDADTLYITYEFYTWAEWAYEWY\nMamu-B37,YYAMYEQRVARTDVDTLYIIYRDYTWAVRAYTWY\nMamu-B38,YHEMYRNRAANTDGNTAYIWYYDYTWAALAYTWY\nMamu-B39,YTELYEERAETTFVSTAYIWYDYYTWAEMAYRWY\nMamu-B3901,YTELYEERAETTFVSTAYIWYDYYTWAEMAYRWY\nMamu-B40,YTEMYEQNSGHTHVNTAYIRYDFYTWAVMAYRWY\nMamu-B41,YSEMYEQIAGHTEGNTAYIWYHLYTWAEWAYEWY\nMamu-B43,YSEMYRNRAGNTEGSTLYFRYDSYTWAERAYTWH\nMamu-B44,YHAEYEQNAAHTHVSNLYLKYDYYTWAALAYTWY\nMamu-B45,YHSEYEEIAANTHGNTLYIKYDYYTWAAQAYEWY\nMamu-B46,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B47,YHSEYREKAAQTDVDTLYIWSRDYTWDYLAYTWY\nMamu-B48,YSAMYEEIADATFVGILYYWYHFYTWVEFAYRWY\nMamu-B48,YSAMYEEIADATFVGILYYWYHFYTWVEFAYRWY\nMamu-B49,YGVMYEEKADNTHVSNLYFRYYFYTWAVLAHPWY\nMamu-B5002,YSAMYRENAANTDVNTLYLMHHQYTWDYFAYEWY\nMamu-B52,YSEMYEERAGNTFVNTAYIGYHHYTWAVLAYRWY\nMamu-B53,YGAMYEQLAARTDVGTLYYWLHSYTWDYFAYEWY\nMamu-B55,YSAMYEEKADISFVSNLYYWVHYYTWVGFAYRSY\nMamu-B57,YHAEYEQIAGNTHVDTLYLTYHSYTWAAQAYRWY\nMamu-B5802,YSEMYRNNAGHTFGNTAYLWYHFYTWAEQAYEWY\nMamu-B6002,YSSMYEQLADFSFVGNLYLWYHFYTWAEVAHTWH\nMamu-B61,YSAMYRESAANTDVNTLYLMHHQYTWDYFAYEWY\nMamu-B63,YSAMYEQLADISFVGNLYLWYHFYTWAEMAYTWH\nMamu-B64,YGYVYEQIAARTDADTLYIWFHSYTWDYQAYEWY\nMamu-B65,YSEMYEERAGHTEGNTAYLWYDLYTWAEWAYEWY\nMamu-B66,YSYMYEEKAARTDVDTLYIIYRDYTWAVWAYTWY\nMamu-B6601,YSYMYEEKAARTDVDTLYIIYRDYTWAVWAYTWY\nMamu-B67,YSEMYEEIAANTDGNTLYVISHHYTWAERAYGWY\nMamu-B69,YTSMYEERAGHTDGGILYYRSHRYTWAVKAYEWY\nMamu-B70,YGYVYEQLAARTDADTLYIWFHSYTWDYMAYEWY\nMamu-B71,YSSMYRNKAAHTDVDTLYIMYRDYTWAVRAYLSY\nMamu-B8301,YSEMYEQNSARTDVDTLYITYRDYTWAAQAYRSY\nMamu-B8701,YSAMYEEKAGHTDENTLYLRSYRYTWAARAYRWY\nMamu-B:00101,YHSMYREKAGNTDENIAYLMHYRYTWAVRAYRWY\nMamu-B:00102,YHSMYREKAGNTDENIAYLMHYRYTWAVRAYRWY\nMamu-B:00201,YSEMYRNNSVTTFVGNLYLWYHFYTWAEMAYTWH\nMamu-B:00202,YSEMYRNNSVTTFVGNLYLWYHFYTWAEMAYTWH\nMamu-B:00301,YSSEYEENAGHTDADNLYLTYHYYTWAEVAYTWY\nMamu-B:00302,YSSEYEENAGHTDADNLYLTYHYYTWAEVAYTWY\nMamu-B:00401,YSEMYEERAGNTFVGNLYYWYDFYTWAEQAYTWY\nMamu-B:00501,YYAMYEQRVGHTFVSNLYLRSDYYTWAVLAYEWY\nMamu-B:00502,YYAMYEQRVGHTFVSNLYLRSDYYTWAVLAYEWY\nMamu-B:00601,YHAMYEEIAGHTEGNTLYVWHYFYTWAERAYQWY\nMamu-B:00602,YHAMYEEIAGHTEGNTLYVWHHLYTWAERAYQWY\nMamu-B:00701,YSEMYEARAGNTHGNTAYLWYEFYTWAALAYKWY\nMamu-B:00702,YSEMYEARAGNTHGNTAYLWYEFYTWAALAYKWY\nMamu-B:00703,YSEMYEARAGNTHGNTAYLWYEFYTWAALAYKWY\nMamu-B:00704,YSEMYEARAANTHGNTAYLWYEFYTWAALAYKWY\nMamu-B:00801,YSSEYEERAGHTDADTLYLTYHYYTWAEVAYTWY\nMamu-B:01001,YTEMYEQNSANTHVDTAYLTYHYYTWAERAYRWY\nMamu-B:01101,YHAEYEQIAGNAHGNNLYVTYEFYTWAADAYPWY\nMamu-B:01201,YSEMYEQNAANTDVSNLYVIYHFYTWDYFAYRWY\nMamu-B:01301,YGSEYEQIAANTDVDTLYIWYRDYTWDYFAYTWY\nMamu-B:01401,YYAMYEQRVGHTFVSNLYFRSDYYTWASLAYEWY\nMamu-B:01501,YTEMYEQNSGHTEENTLYIWYDSYTWAVLAYRWY\nMamu-B:01502,YTEMYEQNSGHTEENTLYIWYDSYTWAVLAYRWY\nMamu-B:01601,YTAMYEERAAHTDGGILYYKSYRYTWAALAYEWY\nMamu-B:01701,YYSEYEARAEATHENTAYIKYHSYTWNYFAYEWY\nMamu-B:01702,YYSEYEARAEATHENTAYIKYHSYTWNYFAYEWY\nMamu-B:01703,YYAEYEQRAEATHENTAYIKYHSYTWNYFAYEWY\nMamu-B:01704,YYSEYEARAEATHENTAYIKYHSYTWNYFAYEWY\nMamu-B:01801,YHEMYRNNAAHTDVDTLYIWYRDYTWDYFAYTWY\nMamu-B:01901,YTEMYRNRAGNTEGNTLYVRYHSYTWAEMAYEWH\nMamu-B:01902,YTEMYRNSAGNTEGNTLYVRYHSYTWAEMAYEWH\nMamu-B:01903,YTEMYRNRAGNTEGNTLYVRYHSYTWAEMAYEWY\nMamu-B:02001,YSEMYRNRAGHTHGNTLYVISHHYTWAEWAYGWY\nMamu-B:02101,YHSMYRNISAHTHGNTLYITYNHYTWDYFADASY\nMamu-B:02102,YHSMYRNISAHTHGNTLYITYNHYTWDYFADASY\nMamu-B:02103,YHSMYRNISAHTHGNTLYITYNHYTWDYFADASY\nMamu-B:02201,YTEMYRNRAANTDGNTAYIWYHFYTWAEQAYEWH\nMamu-B:02301,YTEMYRNNSGHTHADTLYLKYHYYTWAERAYTWY\nMamu-B:02401,YSSMYRERAGNTFVSNLYLWYDSYTWAVQAYTWH\nMamu-B:02501,YYSEYEQRVETADMCTVYFRFYSYTWAYFAYESY\nMamu-B:02601,
YHAEYEQIAGNTHVDTLYLTYHSYTWAAQAYRWY\nMamu-B:02602,YHAEYEQIAGNTHVDTLYLTYHSYTWAAQAYRWY\nMamu-B:02701,YHSEYEQNAAHSHVSNLYLKYDYYTWAALAYTWY\nMamu-B:02702,YHSEYEQNAAHTHVSNLYLKYDYYTWAALAYTWY\nMamu-B:02703,YHSEYEQNAAHSHVSNLYLKYDYYTWAALAYTWY\nMamu-B:02801,YGYMYDVRAARTDVDTLYIIYRDYTWAVMAYEWY\nMamu-B:02802,YGYMYDVIAAQTDVDTLYIIYRDYTWAVMAYEWY\nMamu-B:02803,YGYMYDVIAAQTDVDTLYIIYRDYTWAVMAYEWY\nMamu-B:02901,YSEMYEERAANTDVDTLYIWYHSYTWDYFAYTWY\nMamu-B:02902,YSEMYEERAANTDVDTLYIWYHSYTWDYFAYTSH\nMamu-B:03001,YSSEYEQNTAHNHVCTVYLRFNYYTWTYFAYTSH\nMamu-B:03002,YSSEYEQNTAHNHVCTVYLRFNYYTWTYFAYTSH\nMamu-B:03003,YSSEYEQNTAHNHVCTVYLRFNYYTWTYFAYTSH\nMamu-B:03004,YSSEYEQNTAHNHVCTVYLRFNYYTWTYFAYTSH\nMamu-B:03005,YSSEYEQNTAHNHVCTVYLRFNYYTWTYFAYTSH\nMamu-B:03101,YSSMYEQIADLTFVDTAYFWFHFYTWAEWADTWY\nMamu-B:03102,YSSMYEQIADLTFVDTAYFWFHFHTWAEWAHTWY\nMamu-B:03103,YSSVYEQIADLTFVDTAYFWFHFHTWAEWAHTWY\nMamu-B:03201,YSEMYEERAGTTFVCTAYIWYNHYTWAEFAYKSY\nMamu-B:03301,YSEMYRNRAGNTFGNTAYIWYHFYTWAEQAYEWY\nMamu-B:03401,YSEMYEQIAANTEGNTAYLRYHLYTWAEWAYRWY\nMamu-B:03501,YHVMYEEKADNTHVSNLYLRYYFYTWAVLAHPWY\nMamu-B:03601,YYAMYQENMTATDADTLYITYEFYTWAEWAYEWY\nMamu-B:03602,YYAMYQENMTATDADTLYITYEFYTWAEWAYEWY\nMamu-B:03701,YYAMYEQRVARTDVDTLYIIYRDYTWAVRAYTWY\nMamu-B:03801,YHEMYRNRAANTDGNTAYIWYYDYTWAALAYTWY\nMamu-B:03802,YSEMYRNRAANTHGNTAYIKYYDYTWAALAYTWY\nMamu-B:03901,YTELYEERAETTFVSTAYIWYDYYTWAEMAYRWY\nMamu-B:04001,YTEMYEQNSGHTHVNTAYIRYDFYTWAVMAYRWY\nMamu-B:04002,YTEMYEQNSGHTHVNTAYIRYDFYTWAVLAYRWY\nMamu-B:04101,YSEMYEQIAGHTEGNTAYIWYHLYTWAEWAYEWY\nMamu-B:04201,YYAMYEQIADTTFVSNLYLEYDFYTWAVLAYEWY\nMamu-B:04301,YSEMYRNRAGNTEGSTLYFRYDSYTWAERAYTWH\nMamu-B:04401,YHAEYEQNAAHTHVSNLYLKYDYYTWAALAYTWY\nMamu-B:04402,YHAEYEQNAAHTHVSNLYIKYDYYTWAALAYTWY\nMamu-B:04403,YHAEYEQNAAHTHVSNLYLKYDYYTWAALAYTWY\nMamu-B:04404,YHSEYEQNAAHTHVSNLYLKYDYYTWAALAYTWY\nMamu-B:04405,YHAEYEQNAAHTHVSNLYLKYDYYTWAALAYTWY\nMamu-B:04501,YHSEYEEIAANTHGNTLYIKYDYYTWAAQAYEWY\nMamu-B:04502,YHSEYEHIAANTHGNTLYIKYDYYTWAAQAYEWY\nMamu-B:04503,YHSEYEQIAANTHGNTLYIKYDYYTWAAQAYEWY\nMamu-B:04504,YHSEYEQIAANTHGNTLYIKYDYYTWAAQAYEWY\nMamu-B:04601,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04602,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04603,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04604,YSSMYEQLADVTFVGNLYLWSHFYTWAEWAHTWH\nMamu-B:04605,YSAMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04607,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04608,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04609,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04610,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04611,YSSMYEQLAEAIFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04612,YSSMYEQIAEATFVSNLYLWYHFYTWAEWAHTWH\nMamu-B:04613,YSSMYEQLADVTFVGNLYLWSHFYTWAEWAHTWH\nMamu-B:04614,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04615,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04616,YSSMYEQLADVTFVGNLYLWSHFYTWAEWAHTWH\nMamu-B:04617,YSSMYEQLAEATFVGNLYLWYHFYTWAEWAHTWH\nMamu-B:04701,YHSEYREKAAQTDVDTLYIWSRDYTWDYLAYTWY\nMamu-B:04702,YHSMYRNKAAHTDVDTLYIWSRDYTWDYLAYTWY\nMamu-B:04703,YHSMYRNKAAQTDVDTLYIWSRDYTWDYLAYTWY\nMamu-B:04704,YHSMYRNKAAHTDVDTLYIWSRDYTWDYLAYTWY\nMamu-B:04705,YHSEYREKAAQTDVDTLYIWSRDYTWAERAYTWY\nMamu-B:04801,YSAMYEEIADATFVGILYYWYHFYTWVEFAYRWY\nMamu-B:04802,YSSMYEQIADATFVGILYYWVHFYTWVEFAYRWY\nMamu-B:04901,YGVMYEEKADNTHVSNLYFRYYFYTWAVLAHPWY\nMamu-B:05002,YSAMYRENAANTDVNTLYLMHHQYTWDYFAYEWY\nMamu-B:05101,YGYTYEERAARTDADILYLWAHSYTWTYFAYPWY\nMamu-B:05102,YGYTYEERAARTDADTLYLWAHSYTWTYFAYPWY\nMamu-B:05103,YGYTYEERAARTDADILYLWAHSYTWTYFAYPWY\nMamu-B:05104,YGYTYEERAARTDADTLYLWAHSYTWTYFAYPWY\nMamu-B:05105,YGSTYEERAARTDADTLYVWAHSYTWTYFAYPWY\nMamu-B:05201,YSEMYEERAGNTFVNTAYIGYHHYTWA
VLAYRWY\nMamu-B:05301,YGAMYEQLAARTDVGTLYYWLHSYTWDYFAYEWY\nMamu-B:05302,YGAMYEQLAARTDVGTLYYWLHSYTWDYFAYEWY\nMamu-B:05401,YSSMYEQLADISFVGNLYLWYHFYTWAEMAHTWH\nMamu-B:05501,YSAMYEEKADISFVSNLYYWVHYYTWVGFAYRSY\nMamu-B:05601,YSAMYEQRVEATFGNTAYFWFEYYTWAEMAYEWY\nMamu-B:05602,YSAMYEQRVEATFGNTAYFWFDSYTWAELAYEWY\nMamu-B:05701,YHAEYEQIAGNTHVDTLYLTYHSYTWAAQAYRWY\nMamu-B:05702,YHAEYEQIAGNTHVDTLYLTYHSYTWAAQAYRWY\nMamu-B:05802,YSEMYRNNAGHTFGNTAYLWYHFYTWAEQAYEWY\nMamu-B:05901,YGAMYEQIAANTDVDTLYIWYHDYTWDYFAYTWY\nMamu-B:06001,YSSMYEQLADFSFVGNLYLWYHFYTWAEVAHTWH\nMamu-B:06002,YSSMYEQLADFSFVGNLYLWYHFYTWAEVAHTWH\nMamu-B:06003,YSSMYEQLADISFVGNLYIWYHFYTWAEMAHTWH\nMamu-B:06101,YSAMYRESAANTDVNTLYLMHHQYTWDYFAYEWY\nMamu-B:06102,YSAMYRENAANTDVNTLYLMHHQYTWDYFAYEWY\nMamu-B:06103,YSAMYRENAANTDVNTLYLMHHQYTWDYFAYEWY\nMamu-B:06301,YSAMYEQLADISFVGNLYLWYHFYTWAEMAYTWH\nMamu-B:06302,YSAMYEQLADISFVGNLYLWYHFYTWAEMAYTWH\nMamu-B:06401,YGYVYEQIAARTDADTLYIWFHSYTWDYQAYEWY\nMamu-B:06402,YGYVYEQIAARADADTLYIWFHSYTWDYQAYEWY\nMamu-B:06501,YSEMYEERAGHTEGNTAYLWYDLYTWAEWAYEWY\nMamu-B:06502,YSEMYEQRAGHTEGNTAYLWYDLYTWAEWAYEWY\nMamu-B:06503,YSEMYEQRAGHTEGNTAYLWYDLYTWAEWAYEWY\nMamu-B:06601,YSYMYEEKAARTDVDTLYIIYRDYTWAVWAYTWY\nMamu-B:06701,YSEMYEEIAANTDGNTLYVISHHYTWAERAYGWY\nMamu-B:06702,YSEMYEEIAANTDGNTLYVISHHYTWAERAYGWY\nMamu-B:06801,YSEMYRNRAGHTHGNTLYVISHHYTWAEWAYGWY\nMamu-B:06802,YSEMYRNRAGHTHGNTLYVISHHYTWAEWAYGWY\nMamu-B:06803,YSEMYRNRAGHTHGNTLYVISHHYTWAEWAYGWY\nMamu-B:06804,YSEMYRNSAGHTHGNTLYVISHHYTWAEWAYGWY\nMamu-B:06805,YSEMYRNRAGHTHGNTLYVICHHYTWAEWAYGWY\nMamu-B:06901,YTSMYEERAGHTDGGILYYRSHRYTWAVKAYEWY\nMamu-B:06902,YTAMYEQRAGHTDGGNLYYRSYRYTWAVKAYEWY\nMamu-B:06903,YTAMYEARVGHTDGGILYYKSHRYTWAVKAYEWH\nMamu-B:06904,YTSMYEARAGHTDGGNLYYRSHRYTWAVWAYEWH\nMamu-B:07001,YGYVYEQLAARTDADTLYIWFHSYTWDYMAYEWY\nMamu-B:07002,YGYVYEQIAARTDADTLYIWFHSYTWDYMAYEWY\nMamu-B:07101,YSSMYRNKAAHTDVDTLYIMYRDYTWAVRAYLSY\nMamu-B:07201,YIAMYEEKVETTFVSNLYYWYHFYTWAVMAYQCY\nMamu-B:07202,YIAMYEEKVETTFVSNLYYWYHFYTWAVMAYQCY\nMamu-B:07301,YSEMYEETAANTEVNTAYIRYRDYTWDYMVYRWY\nMamu-B:07401,YGYAYEQLAARTDVDTLYIWSRDYTWAEWAYEWH\nMamu-B:07402,YGYAYEQLAARTDVDTLYIWSRDYTWAEWAYEWH\nMamu-B:07501,YSSMYEEKADVSFVGTLYYWVHYYTWAEFAYPWY\nMamu-B:07502,YSSMYEEKADVSFVGTLYYWVHYYTWVEFAYPWY\nMamu-B:07601,YHSMYEQIAANTDVDTLYITYRDYTWAEWAYEWY\nMamu-B:07602,YHSMYEQIAGNTHVDTLYITYRDYTWAEWAYEWY\nMamu-B:07701,YHSEYEQIAGNTDADTLYLWYRDYTWAEWAYEWH\nMamu-B:07702,YHSEYEQIADTTDANTLYLWYRDYTWAEWAYEWH\nMamu-B:07801,YSAMYRENAANTDVNTLYLMHHQYTWDYFAYEWY\nMamu-B:07901,YSSMYEQLANIFFVGNLYLWYHFYTWAEMAHTWY\nMamu-B:07902,YSSMYEQLANIFFVGTLYLWYHFYTWAEMAHTWY\nMamu-B:07903,YSSMYEQLADISFVGNLYLWYHFYTWAEMAHTWY\nMamu-B:08001,YHYMYEEIAANTDADTLYYWSRDYTWAYFAYEWY\nMamu-B:08101,YHEMYRNRAGNTEGNTLYIWYDSYTWAEMAYEWH\nMamu-B:08102,YHEMYRNRAGNTEGNTLYIWYDSYTWAEMAYEWH\nMamu-B:08201,YHSMYEQIAEATEGNTLYLWYDSYTWAAQAYEWY\nMamu-B:08202,YHSMYEQIAEATFGNTLYLWYDSYTWAAQAYEWY\nMamu-B:08301,YSEMYEQNSARTDVDTLYITYRDYTWAAQAYRSY\nMamu-B:08401,YYAMYEQNAAHSHVSNLYLKYDYYTWAALAYTWY\nMamu-B:08501,YGAMYEQIAANTFVGTLYLRYESYTWDYLAYTWY\nMamu-B:08502,YGAMYEQIAETTFVGTLYLRYESYTWDYLAYTWY\nMamu-B:08601,YFAMYEQRAAHTDESNLYIWYRDYTWAEWAYEWY\nMamu-B:08602,YFAMYEQRAAHTDESNLYIWYRDYTWAEWAYEWY\nMamu-B:08603,YFAMYEQRAAHTDESNLYIWYRDYTWAEWAYEWY\nMamu-B:08701,YSAMYEEKAGHTDENTLYLRSYRYTWAARAYRWY\nMamu-B:08801,YYAMYEEKVETTFVSILYYWYHSYTWAVMAYQCY\nMamu-B:08901,YHSEYEQIAGNTDENTLYIWYDSYTWAAQAYEWY\nMamu-B:09001,YHYMYEERAANTDVDTLYIWSRDYTWTYFAYTWY\nMamu-B:09101,YGYMYEEKAARTDVNTLYIIYDYYTWAERAYTWY\nMamu-B:09102,YGYMYEEKAARTDVNTLYIIYRDYTWAERAYTWY\nMamu-B:09201,YGAAYEQIAANTDVGTLYIWFHSYTWDYFAYEWY\nMamu-B:09301,YHSMY
RNTAGNTDGSNLYLTYEYYTWAEWAYTWY\nMamu-B:09401,YHEMYEANAARTDVDTLYIWARDYTWDYLAYTWY\nMamu-B:09501,YSSMYEAIAARTDVDTLYITYRDYTWDYFAYRWY\nMamu-B:09601,YSSMYEQLADISFVGNLYLWYHFYTWAEMAYTWH\nMamu-B:09701,YSAMYEQLADNSFVSNLYLWSHFYTWAEMAYTWH\nMamu-B:09801,YSVMYEQRVDATFVSNLYLTSYQYTWAVWAYECY\nMamu-B:09901,YSTMYEQLADISFVSNLYITYRDYTWDYFADRWY\nMamu-B:10001,YSEMYRNNAGNTFGNTVYLWYHLYTWAEQAYEWH\nMamu-B:10101,YSYMYEEKAGHTDVDTLYIIYRDYTWAVDAYPWY\nPatr-A0101,YFAMYQESAAHTDVDTLYIIYRDYTWAAQAYTWY\nPatr-A01:01,YFAMYQESAAHTDVDTLYIIYRDYTWAAQAYTWY\nPatr-A0201,YFAMYEESAAHTDVDTLYIIYRDYTWAARAYTWY\nPatr-A02:01,YFAMYEESAAHTDVDTLYIIYRDYTWAARAYTWY\nPatr-A0301,YYAMYQENMASTDVDTLYIIYRDYTWAALAYRWY\nPatr-A0302,YYAMYQENMASTDVDTLYIIYRDYTWAALAYRGY\nPatr-A03:01,YYAMYQENMASTDVDTLYIIYRDYTWAALAYRWY\nPatr-A03:02,YYAMYQENMASTDVDTLYIIYRDYTWAALAYRGY\nPatr-A0401,YSAMYEESVASTDVDTLYILFRDYTWAALAYTWY\nPatr-A0402,YSAMYEESVASTDVDTLYILFRDYTWAAWAYTGY\nPatr-A0404,YSAMYEESVAQTDVDTLYILFRDYTWAAWAYTGY\nPatr-A04:01,YSAMYEESVASTDVDTLYILFRDYTWAALAYTWY\nPatr-A04:02,YSAMYEESVASTDVDTLYILFRDYTWAAWAYTGY\nPatr-A04:04,YSAMYEESVAQTDVDTLYILFRDYTWAAWAYTGY\nPatr-A0501,YSAMYEESVAFTDVDTLYILFRDYTWAAWAYTGY\nPatr-A05:01,YSAMYEESVAFTDVDTLYILFRDYTWAAWAYTGY\nPatr-A0601,YSAMYQESVASTDANTLYIIYRDYTWAAWAYTGY\nPatr-A0602,YSAMYQESVAFTDANTLYIIYRDYTWAAWAYTGY\nPatr-A06:01,YSAMYQESVASTDANTLYIIYRDYTWAAWAYTGY\nPatr-A06:02,YSAMYQESVAFTDANTLYIIYRDYTWAAWAYTGY\nPatr-A0701,YSAMYRESVAGIYANTLYILFELYTWVAQAYRSY\nPatr-A07:01,YSAMYRESVAGIYANTLYILFELYTWVAQAYRSY\nPatr-A0801,YSAMYQESVAGIYANTLYIIFELYTWAARAYTWY\nPatr-A0802,YSAMYQESVAGIYANTLYILFELYTWAARAYTWY\nPatr-A0803,YSAMYQESVAGIYANTLYIIFELYTWAARAYTWY\nPatr-A08:01,YSAMYQESVAGIYANTLYIIFELYTWAARAYTWY\nPatr-A08:02,YSAMYQESVAGIYANTLYILFELYTWAARAYTWY\nPatr-A08:03,YSAMYQESVAGIYANTLYIIFELYTWAARAYTWY\nPatr-A0901,YSAMYEESVASTDVDTLYIIYRYYTWAALAYTWY\nPatr-A0902,YSAMYEESVASTDVDTLYIIYRDYTWAALAYTWY\nPatr-A09:01,YSAMYEESVASTDVDTLYIIYRYYTWAALAYTWY\nPatr-A09:02,YSAMYEESVASTDVDTLYIIYRDYTWAALAYTWY\nPatr-A1001,YSAMYQENMAFIYANTLYILFEHYTWAAWAYRGY\nPatr-A10:01,YSAMYQENMAFIYANTLYILFEHYTWAAWAYRGY\nPatr-A1101,YYAMYQENMASTDANTLYIIYRDYTWAARAYTGY\nPatr-A11:01,YYAMYQENMASTDANTLYIIYRDYTWAARAYTGY\nPatr-A1201,YSAMYEESVASTDANTLYILFEHYTWAALAYTWY\nPatr-A12:01,YSAMYEESVASTDANTLYILFEHYTWAALAYTWY\nPatr-A1301,YSAMYEESVAFTDANTLYILFRYYTWAAQAYTWY\nPatr-A13:01,YSAMYEESVAFTDANTLYILFRYYTWAAQAYTWY\nPatr-A1401,YSAMYEESVAFTDANTLYILFEHYTCAALAYTWY\nPatr-A14:01,YSAMYEESVAFTDANTLYILFEHYTCAALAYTWY\nPatr-A1501,YSAMYEESVAFTDANTLYILFEHYTWAAQAYTWY\nPatr-A1502,YSAMYEESVAFTDANTLYILFEHYTWAADAYTWY\nPatr-A15:01,YSAMYEESVAFTDANTLYILFEHYTWAAQAYTWY\nPatr-A15:02,YSAMYEESVAFTDANTLYILFEHYTWAADAYTWY\nPatr-A1601,YYAMYEESAAHTDVDTLYILYRDYTWAVLAYLGY\nPatr-A16:01,YYAMYEESAAHTDVDTLYILYRDYTWAVLAYLGY\nPatr-A1701,YYAMYEESTASTNVDTLYIIYRDYTWAVDAYTWY\nPatr-A1702,YYAMYEESAASTNVDTLYIIYRDYTWAVDAYTWY\nPatr-A1703,YYAMYQENMASTDVDTLYIIYRDYTWAVDAYTWY\nPatr-A17:01,YYAMYEESTASTNVDTLYIIYRDYTWAVDAYTWY\nPatr-A17:02,YYAMYEESAASTNVDTLYIIYRDYTWAVDAYTWY\nPatr-A17:03,YYAMYQENMASTDVDTLYIIYRDYTWAVDAYTWY\nPatr-A1801,YSAMYEESVAFTDVDTLYILFELYTWAEWAYRWY\nPatr-A18:01,YSAMYEESVAFTDVDTLYILFELYTWAEWAYRWY\nPatr-A2301,YSAMYRESVASTDANTLYILFRDYTWVAQAYRSY\nPatr-A23:01,YSAMYRESVASTDANTLYILFRDYTWVAQAYRSY\nPatr-A2401,YSAMYRESVAGTDANTLYIIYRDYTWAAWAYTGY\nPatr-A24:01,YSAMYRESVAGTDANTLYIIYRDYTWAAWAYTGY\nPatr-B0101,YYTMYRENMASTDENIAYWTYGYYTWAERAYTWY\nPatr-B0102,YYTMYRENMASTDENIAYWTYGYYTWAERAYTWY\nPatr-B01:01,YYTMYRENMASTDENIAYWTYGYYTWAERAYTWY\nPatr-B01:02,YYTMYRENMASTDENIAYWTYGYYTWAERAYTWY\nPatr-B0201,YYSEYREISTNTYESNLYIRYEYYTWAWLAYTWY\nPatr-B0203,YYSEYREISTNTYESNLYIRYEYYTWAWLAYTWY
\nPatr-B02:01,YYSEYREISTNTYESNLYIRYEYYTWAWLAYTWY\nPatr-B02:03,YYSEYREISTNTYESNLYIRYEYYTWAWLAYTWY\nPatr-B0301,YDTMYRENVASTDENIAYWTFYYYTWAALAYTWY\nPatr-B0302,YDTMYRENVASTDENIAYWTFYYYTWAALAYTWY\nPatr-B03:01,YDTMYRENVASTDENIAYWTFYYYTWAALAYTWY\nPatr-B03:02,YDTMYRENVASTDENIAYWTFYYYTWAALAYTWY\nPatr-B0401,YYTKYREISTNTYVGNLYWTFRYYTWAVLAYTWY\nPatr-B0402,YYTKYREISTNTYVGNLYWTFRYYTWAVLAYTWY\nPatr-B04:01,YYTKYREISTNTYVGNLYWTFRYYTWAVLAYTWY\nPatr-B04:02,YYTKYREISTNTYVGNLYWTFRYYTWAVLAYTWY\nPatr-B0501,YYSEYREISTNTYESNLYIRYEYYTWAWLAYTWY\nPatr-B0502,YYSEYREISTNTYESNLYIRYEYYTWARLAYTWY\nPatr-B05:01,YYSEYREISTNTYESNLYIRYEYYTWAWLAYTWY\nPatr-B05:02,YYSEYREISTNTYESNLYIRYEYYTWARLAYTWY\nPatr-B0601,YYSMYRENVASTDGSNLYWTYDYYTWAVWAYLWY\nPatr-B06:01,YYSMYRENVASTDGSNLYWTYDYYTWAVWAYLWY\nPatr-B0701,YLTMYRENVASTYENIAYLTYRFYTWAVHAYLWY\nPatr-B0702,YLTMYRENVAFTYENIAYITFRYYTWAVHAYLWY\nPatr-B07:01,YLTMYRENVASTYENIAYLTYRFYTWAVHAYLWY\nPatr-B07:02,YLTMYRENVAFTYENIAYITFRYYTWAVHAYLWY\nPatr-B0801,YYAMYREISTNTYVGNLYWTYRFYTWAWLAYTWY\nPatr-B0802,YYAMYREISTNTYVGNLYWTYRFYTWAWLAYTWY\nPatr-B08:01,YYAMYREISTNTYVGNLYWTYRFYTWAWLAYTWY\nPatr-B08:02,YYAMYREISTNTYVGNLYWTYRFYTWAWLAYTWY\nPatr-B0901,YYTMYRENMASTDENIAYIRYYYYTWAARAYTWY\nPatr-B09:01,YYTMYRENMASTDENIAYIRYYYYTWAARAYTWY\nPatr-B1001,YYTMYRENMASTDENIAYWTYGYYTWAERAYTWY\nPatr-B10:01,YYTMYRENMASTDENIAYWTYGYYTWAERAYTWY\nPatr-B1101,YYSEYRNIYAQTDVGNLYWTYDYYTWAERAYLWY\nPatr-B1102,YYSEYRNIYAQTDVGNLYWTYDYYTWAVWAYLWY\nPatr-B11:01,YYSEYRNIYAQTDVGNLYWTYDYYTWAERAYLWY\nPatr-B11:02,YYSEYRNIYAQTDVGNLYWTYDYYTWAVWAYLWY\nPatr-B1202,YYSEYRNICAQTDGSNLYLRYYDYTWAVHAYLWY\nPatr-B12:02,YYSEYRNICAQTDGSNLYLRYYDYTWAVHAYLWY\nPatr-B1301,YYSEYRNIYAQTDVSNLYLSYEYYTWAVRAYTWY\nPatr-B13:01,YYSEYRNIYAQTDVSNLYLSYEYYTWAVRAYTWY\nPatr-B1401,YYAMYRNGITQTDENTLYLSYDYYTWAVLAYTWY\nPatr-B14:01,YYAMYRNGITQTDENTLYLSYDYYTWAVLAYTWY\nPatr-B1601,YYTKYREISTNTDVSNLYWTFRYYTWAVLAYTWY\nPatr-B1602,YYTKYREISTNTDESNLYWTFRYYTWAVLAYTWY\nPatr-B16:01,YYTKYREISTNTDVSNLYWTFRYYTWAVLAYTWY\nPatr-B16:02,YYTKYREISTNTDESNLYWTFRYYTWAVLAYTWY\nPatr-B1701,YYSVYREIFTNTDVSNLYLTYYYYSFAALAYTWY\nPatr-B1702,YYSVYREIFTNTDGSNLYLTYYYYSFAALAYTWY\nPatr-B1703,YFTVYRQISTNTDGSNLYLTYYYYSFAALAYTWY\nPatr-B17:01,YYSVYREIFTNTDVSNLYLTYYYYSFAALAYTWY\nPatr-B17:02,YYSVYREIFTNTDGSNLYLTYYYYSFAALAYTWY\nPatr-B17:03,YFTVYRQISTNTDGSNLYLTYYYYSFAALAYTWY\nPatr-B1801,YFTMYRENVASTDENIAYIRYYSYTWAERAYTWY\nPatr-B18:01,YFTMYRENVASTDENIAYIRYYSYTWAERAYTWY\nPatr-B1901,YYSEYRNIYAQTDENIAYLTYYDYTWAELAYTWY\nPatr-B19:01,YYSEYRNIYAQTDENIAYLTYYDYTWAELAYTWY\nPatr-B2001,YYTMYRENVASTDENIAYWTYGFYTWAVLAYTWY\nPatr-B20:01,YYTMYRENVASTDENIAYWTYGFYTWAVLAYTWY\nPatr-B2101,YYTMYRQISTNTYESNLYITYRFYTWAWLAYLWY\nPatr-B21:01,YYTMYRQISTNTYESNLYITYRFYTWAWLAYLWY\nPatr-B2201,YYTKYREISTNTYESNLYLTFDYYTWAALAYEWY\nPatr-B2202,YYTKYREISTNTYENIAYLTFDYYTWAALAYEWY\nPatr-B22:01,YYTKYREISTNTYESNLYLTFDYYTWAALAYEWY\nPatr-B22:02,YYTKYREISTNTYENIAYLTFDYYTWAALAYEWY\nPatr-B2301,YYSEYREISTNTYENIAYLTYRFYTWAWWAYTWY\nPatr-B2302,YYSEYREISTNTYENIAYLTYRFYTWAWLAYLWY\nPatr-B2303,YYSEYREISTNTYENTLYLTYRYYTWAWLAYLWY\nPatr-B23:01,YYSEYREISTNTYENIAYLTYRFYTWAWWAYTWY\nPatr-B23:02,YYSEYREISTNTYENIAYLTYRFYTWAWLAYLWY\nPatr-B23:03,YYSEYREISTNTYENTLYLTYRYYTWAWLAYLWY\nPatr-B2401,YYTKYREISTNTDENTLYWTFRFYTWAVRAYTWY\nPatr-B2402,YYTKYREISTNTDENTLYWTFRYYTWAVRAYTWY\nPatr-B24:01,YYTKYREISTNTDENTLYWTFRFYTWAVRAYTWY\nPatr-B24:02,YYTKYREISTNTDENTLYWTFRYYTWAVRAYTWY\nPatr-B2501,YLAMYRENMAFTYENIAYLTYRFYTWAERAYLWY\nPatr-B25:01,YLAMYRENMAFTYENIAYLTYRFYTWAERAYLWY\nPatr-B2601,YYTMYRENVAFTDENTLYLTYEFYTWAEHAYLGY\nPatr-B26:01,YYTMYRENVAFTDENTLYLTYEFYTWAEHAYLGY\nPatr-B2701,YYTMYRENVASTDGSNLYLTYYYYTW
AVWAYLWY\nPatr-B27:01,YYTMYRENVASTDGSNLYLTYYYYTWAVWAYLWY\nPatr-B2801,YYTEYRNIFAQTDVGNLYWTYYHYTWAARAYTWY\nPatr-B28:01,YYTEYRNIFAQTDVGNLYWTYYHYTWAARAYTWY\nPatr-B2901,YYAMYGNGVTQTDESNLYWTYGYYTWAVLAYTWY\nPatr-B29:01,YYAMYGNGVTQTDESNLYWTYGYYTWAVLAYTWY\nPatr-B3001,YYTKYREISTNTYENIAYIRYDYYTWAWHAYEWY\nPatr-B30:01,YYTKYREISTNTYENIAYIRYDYYTWAWHAYEWY\nPatr-B3501,YYAMYRNIYAQTDESNLYIRYEYYTWAWLAYTWY\nPatr-B35:01,YYAMYRNIYAQTDESNLYIRYEYYTWAWLAYTWY\nPatr-B3601,YYAMYRNGVAQTDENIAYIRYEYYTWAWLAYTWY\nPatr-B36:01,YYAMYRNGVAQTDENIAYIRYEYYTWAWLAYTWY\nPatr-B3701,YYAMYRNGVAQTDENIAYIRYYYYTWAARAYTWY\nPatr-B37:01,YYAMYRNGVAQTDENIAYIRYYYYTWAARAYTWY\nPatr-C0201,YDSGYREKYRQADVSNLYLWYGSYTWAELAYKSY\nPatr-C0202,YDSGYREKYRQADVSNLYLWYGSYTWAELAYKWY\nPatr-C0203,YDSGYREKYRQADVNKLYLRSGYYTWAELAYKWY\nPatr-C0204,YDSGYREKYRQADVSNLYLRSGSYTWAELAYKWY\nPatr-C0205,YDSGYREKYRQADVSNLYLRSGSYTWAELAYKWY\nPatr-C0206,YDSGYREKYRQADVSNLYLWYGSYTWAELAYKWY\nPatr-C02:01,YDSGYREKYRQADVSNLYLWYGSYTWAELAYKSY\nPatr-C02:02,YDSGYREKYRQADVSNLYLWYGSYTWAELAYKWY\nPatr-C02:03,YDSGYREKYRQADVNKLYLRSGYYTWAELAYKWY\nPatr-C02:04,YDSGYREKYRQADVSNLYLRSGSYTWAELAYKWY\nPatr-C02:05,YDSGYREKYRQADVSNLYLRSGSYTWAELAYKWY\nPatr-C02:06,YDSGYREKYRQADVSNLYLWYGSYTWAELAYKWY\nPatr-C0301,YYAGYREKYRQADVNKLYLSYGYYTWAERAYLWY\nPatr-C0302,YYAGYREKYRQADVNKLYLSYELYTWAERAYLWY\nPatr-C0303,YYAGYREKYRQADVNKLYLSYELYTWAERAYLWY\nPatr-C0304,YYAGYREKYRQADVSNLYLKYEFYTWAERAYLWY\nPatr-C03:01,YYAGYREKYRQADVNKLYLSYGYYTWAERAYLWY\nPatr-C03:02,YYAGYREKYRQADVNKLYLSYELYTWAERAYLWY\nPatr-C03:03,YYAGYREKYRQADVNKLYLSYELYTWAERAYLWY\nPatr-C03:04,YYAGYREKYRQADVSNLYLKYEFYTWAERAYLWY\nPatr-C0401,YYAGYREKYRQADVNKLYLKYEFYTWAALAYLWY\nPatr-C04:01,YYAGYREKYRQADVNKLYLKYEFYTWAALAYLWY\nPatr-C0501,YYAGYREKYRQADVSNLYLWYDYYTLAALAYTWY\nPatr-C0502,YYAGYREKYRQADVSNLYLSYDYYTLAALAYTWY\nPatr-C05:01,YYAGYREKYRQADVSNLYLWYDYYTLAALAYTWY\nPatr-C05:02,YYAGYREKYRQADVSNLYLSYDYYTLAALAYTWY\nPatr-C0601,YYAGYREKYRQADVSNLYLSYDYYTLAALAYLWY\nPatr-C06:01,YYAGYREKYRQADVSNLYLSYDYYTLAALAYLWY\nPatr-C0701,YYAGYREKYRQADVSNLYLSYDYYTWAAHAYLWY\nPatr-C07:01,YYAGYREKYRQADVSNLYLSYDYYTWAAHAYLWY\nPatr-C0801,YYAGYREKYRQADVSNLYLSYDYYTLAALAYLWY\nPatr-C08:01,YYAGYREKYRQADVSNLYLSYDYYTLAALAYLWY\nPatr-C0901,YYAGYREKYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C0902,YYAGYRENYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C0903,YYAGYRENYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C0904,YYAGYREIYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C0905,YYAGYRQIYRQADVNKLYLKYEFYTLAALAYTWY\nPatr-C09:01,YYAGYREKYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C09:02,YYAGYRENYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C09:03,YYAGYRENYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C09:04,YYAGYREIYRQADVNKLYLKYEFYTLAAWAYLWY\nPatr-C09:05,YYAGYRQIYRQADVNKLYLKYEFYTLAALAYTWY\nPatr-C1001,YDSGYRQIYRQADVNKLYIRFSSYTWAALAYTWH\nPatr-C10:01,YDSGYRQIYRQADVNKLYIRFSSYTWAALAYTWH\nPatr-C1101,YYAGYREKYRQADVSNLYLSYDYYTLAARAYLWY\nPatr-C11:01,YYAGYREKYRQADVSNLYLSYDYYTLAARAYLWY\nPatr-C1201,YYAGYREKYRQADVNKLYLSYDYYTLAALAYLWY\nPatr-C12:01,YYAGYREKYRQADVNKLYLSYDYYTLAALAYLWY\nPatr-C1301,YDSGYQEKYRQADVNKLYLRSEFYTWAALAYLWY\nPatr-C1302,YDSGYQEKYRQADVNKLYLRSEFYTWAALAYLWY\nPatr-C13:01,YDSGYQEKYRQADVNKLYLRSEFYTWAALAYLWY\nPatr-C13:02,YDSGYQEKYRQADVNKLYLRSEFYTWAALAYLWY\nPatr-C1501,YYAGYREKYRQADVSNLYLWYDYYTLAALAYLWY\nPatr-C15:01,YYAGYREKYRQADVSNLYLWYDYYTLAALAYLWY\nPatr-C1601,YYAGYREKYRQADVSNLYLWYDYYTWAERAYLWY\nPatr-C16:01,YYAGYREKYRQADVSNLYLWYDYYTWAERAYLWY\nSLA-1-CHANGDA,YSAMYRNNVGSINVNTLCLWYDFYPWAAWSYTSY\nSLA-1-HB01,YYAMYREIQDTTYGNTLYLSYSDYTWAVESYLSY\nSLA-1-HB02,YYAMYREIQDTTYGTTLYLSYSDYTWAVESYLSY\nSLA-1-HB03,YYAMYREIQDTTYGTTLYLSYSDYTWAVESYLSY\nSLA-1-HB04,YYAMYREIQDTTYGNTLYLSYSDYTWAVESYLSY\nSLA-1-LWH,YYEMYRERVEETFVNTAYL
RYRDYSWAEMSYLGY\nSLA-1-TPK,HYIMYREISETTYVSNLYYNYSYYSWAAWSYRWY\nSLA-1-YC,YSAMYEEKVDNTYVNTLYLSFRDYTWAAMSYLSY\nSLA-1-YDL01,YYAMYRENVETTYVGTLYLSYRDYTWAERSYLSY\nSLA-1-YTH,YSAMYEEKVDTTYVNTLYLSSHYYSWAVRSYLSY\nSLA-10101,YYAMYREKQDTTYVGNLYYSYRYYTWAVRSYLSY\nSLA-10201,YYAMYEEKVDNTLVGTLYLSFRDYSWAEMSYRGY\nSLA-10202,YYAMYEEKVDNTLVGTLYLSFRDYSWAEMSYRGY\nSLA-10401,YYAMYRENVETTYVGTLYLSYRDYTWAERSYLSY\nSLA-104:01,YYAMYRENVETTYVGTLYLSYRDYTWAERSYLSY\nSLA-10501,YSAMYREKQDTTFVNTAYLSYSDYTWAVMSYLSY\nSLA-10601,YYAMYEEKVDNTYVGTLYLSYRDYSRAAESYLSY\nSLA-10701,YYAEYRNIYETTYVNTLYIIYRDYTWAVLSYRGY\nSLA-10702,YYAEYRNIYETTYVNTLYIIYRDYTWAVLSYRGY\nSLA-107:01,YYAEYRNIYETTYVNTLYIIYRDYTWAVLSYRGY\nSLA-107:02,YYAEYRNIYETTYVNTLYIIYRDYTWAVLSYRGY\nSLA-10801,YYIMYREISETTYVGTLYLRYRDYSWAEMSYLGY\nSLA-11101,YSEMYRERVGNTFGSNLYLWYSFYSWAARSYTWY\nSLA-11201,YYEMYEEKVETINVDTLYLSFRDYTWAEWSYLSY\nSLA-11301,YYAMYEEKVDNTYVGTLYLSFRDYTWAVRSYLSY\nSLA-1:0101,YYAMYREKQDTTYVGNLYYSYRYYTWAVRSYLSY\nSLA-1:0201,YYAMYEEKVDNTLVGTLYLSFRDYSWAEMSYRGY\nSLA-1:0202,YYAMYEEKVDNTLVGTLYLSFRDYSWAEMSYRGY\nSLA-1:0401,YYAMYRENVETTYVGTLYLSYRDYTWAERSYLSY\nSLA-1:0501,YSAMYREKQDTTFVNTAYLSYSDYTWAVMSYLSY\nSLA-1:0601,YYAMYEEKVDNTYVGTLYLSYRDYSRAAESYLSY\nSLA-1:0701,YYAEYRNIYETTYVNTLYIIYRDYTWAVLSYRGY\nSLA-1:0702,YYAEYRNIYETTYVNTLYIIYRDYTWAVLSYRGY\nSLA-1:0801,YYIMYREISETTYVGTLYLRYRDYSWAEMSYLGY\nSLA-1:1101,YSEMYRERVGNTFGSNLYLWYSFYSWAARSYTWY\nSLA-1:1201,YYEMYEEKVETINVDTLYLSFRDYTWAEWSYLSY\nSLA-1:1301,YYAMYEEKVDNTYVGTLYLSFRDYTWAVRSYLSY\nSLA-1:es11,YSAMYRNNVGSINVNTLYLWYDFYSWAAWSYTSY\nSLA-2-YDL02,YYAMYRENVETTYVGTLYLSYRDYTWAERSYLSY\nSLA-20101,YSAMYRENVGSTDVNTLYISYSDYTWAVMSYLGY\nSLA-20102,YSAMYRENVGSTDVNTLYISYRDYTWAVESYTWY\nSLA-20201,YYTEYREISETTYVNTLYLRYSFYSFAVESYLGY\nSLA-20202,YYTEYREISETTYVNTLYLRYSFYSFAVESYLGY\nSLA-20301,YYAMYREIQDTTYVDTLYLSYRFYSWAAESYRWY\nSLA-20302,YYAMYREIQDTTYVDTLYLRYHDYSWAVLSYRSY\nSLA-20401,YDEMYRNNAGNIYGNTAYIIYSDYTWAERSYTWY\nSLA-20401,YDEMYRNNAGNIYGNTAYIIYSDYTWAERSYTWY\nSLA-20402,YDEMYRNNAGNIYGNTAYLIYSDYTWAVWSYTWY\nSLA-204:01,YDEMYRNNAGNIYGNTAYIIYSDYTWAERSYTWY\nSLA-20501,YYAMYEENAGSTFVNTAYFWYSYYTWAVRSYLWY\nSLA-20502,YYAMYRNNARSTFVNTAYFSYRYYTWAVESYLSY\nSLA-205:02,YYAMYRNNARSTFVNTAYFSYRYYTWAVESYLSY\nSLA-20601,YHTKYREISDYRYVGTLYYRYDFYTWAAESYTWY\nSLA-20701,YSAMYRENVGSTDVSNLYLVYRFYSFAVESYLGY\nSLA-21001,YYIMYREISDNINVNTLYLRYDDYSRAEMSYRWY\nSLA-21002,YYIMYREISETNYVDTLYLRYRDYTWAVESYRWY\nSLA-21201,YYAMYRENVGSTDVNTLYFWYSFYSWAVWSYLSY\nSLA-2:0101,YSAMYRENVGSTDVNTLYISYSDYTWAVMSYLGY\nSLA-2:0102,YSAMYRENVGSTDVNTLYISYRDYTWAVESYTWY\nSLA-2:0201,YYTEYREISETTYVNTLYLRYSFYSFAVESYLGY\nSLA-2:0202,YYTEYREISETTYVNTLYLRYSFYSFAVESYLGY\nSLA-2:0301,YYAMYREIQDTTYVDTLYLSYRFYSWAAESYRWY\nSLA-2:0302,YYAMYREIQDTTYVDTLYLRYHDYSWAVLSYRSY\nSLA-2:0401,YDEMYRNNAGNIYGNTAYIIYSDYTWAERSYTWY\nSLA-2:0402,YDEMYRNNAGNIYGNTAYLIYSDYTWAVWSYTWY\nSLA-2:0501,YYAMYEENAGSTFVNTAYFWYSYYTWAVRSYLWY\nSLA-2:0502,YYAMYRNNARSTFVNTAYFSYRYYTWAVESYLSY\nSLA-2:0601,YHTKYREISDYRYVGTLYYRYDFYTWAAESYTWY\nSLA-2:0701,YSAMYRENVGSTDVSNLYLVYRFYSFAVESYLGY\nSLA-2:1001,YYIMYREISDNINVNTLYLRYDDYSRAEMSYRWY\nSLA-2:1002,YYIMYREISETNYVDTLYLRYRDYTWAVESYRWY\nSLA-2:1101,YYAEYRNIYETIYGNTAYFSYSFYTWAARSYLSY\nSLA-2:1201,YYAMYRENVGSTDVNTLYFWYSFYSWAVWSYLSY\nSLA-2:CDY.AA,YYAMYREIQDNTFVNTAYFSYSYYTWAEWSYLGY\nSLA-2:HB01,YYIMYREISETNYVDTLYIRYRDYTWVEESYRWY\nSLA-2:LWH.AA,YYTMYREKVETTYVGTLYISFHDYTWAAWSYLGY\nSLA-2:TPK.AA,YYTEYRNIYETIYVGNLYFSYSDYTRAAWSYTGY\nSLA-2:YC.AA,YYAMYEEKVGSTFVNTAYFWYSYYSWAVESYLSY\nSLA-2:YDL.AA,YYAMYRENVETTYVGTLYLSYRDYTWAERSYLSY\nSLA-2:YDY.AA,YDSMYEENAGSTFVNNLYLSYSDYTWAEMSYAWY\nSLA-2:YTH.AA,YSSEYEEKAGSTFVGNLCLSYHDYSRAVWSYLSC\nSLA-2:es22,YYAMYREIQDNTFVNTAYF
SYSYYTWAEWSYLGY\nSLA-3-CDY,YDSMYEENAGSTFVNNLYLSYSDYTWAEMSYAWY\nSLA-3-HB01,YDEMYREISDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-3-LWH,YYAMYREIQDTTYGNTLYLSYSDYTWAVESYLSY\nSLA-3-TPK,YDEMYREIGDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-3-YC,YDEMYEENAGSTFVNTLYLSYSDYTKAAMSYAWY\nSLA-3-YDL,YDEMYREIGDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-3-YDY01,YDSMYEENAGSTFVNNLYLSYSDYTWAEMSYAWY\nSLA-3-YDY02,YDSMYEENAGSTFVNNLYLSYSDYTWAEMSYAWY\nSLA-3-YTH,YDSMYEENAGSTFVNNLYLSYSDYTRAAMSYAWY\nSLA-30101,YDSMYEENAGSTFVNNLYFSYSFYTRAAMSYAWY\nSLA-30301,YDSMYEENAGSTFVNNLYLSYHDYAWAVWSYAWY\nSLA-30302,YDSMYRENSDNRYVNNLYIRYHDYSWAAWSYAGY\nSLA-30303,YDSMYEENAGSTFVNNLYLSYHDYAWAVWSYAWY\nSLA-30304,YDSMYEENAGSTFVNNLYLSYHDYAWAAWSYAWY\nSLA-30401,YDEMYKENAGSTFVNNLYLSYSDYTRAAMSYAWY\nSLA-30401,YDEMYKENAGSTFVNNLYLSYSDYTRAAMSYAWY\nSLA-30402,YDEMYEENAGSTFVNKLYLSYSDYTRDAMSYAWY\nSLA-304:01,YDEMYKENAGSTFVNNLYLSYSDYTRAAMSYAWY\nSLA-30501,YDEMYREISDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-30502,YDEMYREISDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-30503,YDEMYREISDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-30601,YDSMYEENAGSTFVNNLYLSYSDYTWAEMSYAWY\nSLA-30602,YDSMYEENAGSTFVNNLYYWYSDYTRAAMSYAWY\nSLA-3:0101,YDSMYEENAGSTFVNNLYFSYSFYTRAAMSYAWY\nSLA-3:0301,YDSMYEENAGSTFVNNLYLSYHDYAWAVWSYAWY\nSLA-3:0302,YDSMYRENSDNRYVNNLYIRYHDYSWAAWSYAGY\nSLA-3:0303,YDSMYEENAGSTFVNNLYLSYHDYAWAVWSYAWY\nSLA-3:0304,YDSMYEENAGSTFVNNLYLSYHDYAWAAWSYAWY\nSLA-3:0401,YDEMYKENAGSTFVNNLYLSYSDYTRAAMSYAWY\nSLA-3:0402,YDEMYEENAGSTFVNKLYLSYSDYTRDAMSYAWY\nSLA-3:0501,YDEMYREISDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-3:0502,YDEMYREISDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-3:0503,YDEMYREISDNTYVNNLYLSYSFYTRAAMSYAGY\nSLA-3:0601,YDSMYEENAGSTFVNNLYLSYSDYTWAEMSYAWY\nSLA-3:0602,YDSMYEENAGSTFVNNLYYWYSDYTRAAMSYAWY\nSLA-3:0701,YDSMYEENAGSTFVNNLYLSYSDYTRAAMSYAWY\nSLA-6:0101,YHSVYRQIAEHASSNVINFWSEFYIWEVYAYEWY\nSLA-6:0102,YHSVYRQIAEHASSNVINFWSEFYIWEVYAYEWY\nSLA-6:0103,YHSVYRQIAEHASSNVINFWSEFYIWEVYAYEWY\nSLA-6:0104,YHSVYRQIAEHASSNVINFWSEFYIWEVYAYEWY\nSLA-6:0105,YHSVYRQIAEHASSNVINFWSEFYIWEVYAYEWY"
  },
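  {
    "path": "downloads-generation/allele_sequences/examples/read_pseudosequences.py",
    "content": "\"\"\"\nIllustrative sketch (hypothetical file, not part of the original\ndownload): loading the allele -> pseudosequence CSV above. The path and\nthe absence of a header row are assumptions; adjust for the actual file.\n\"\"\"\nimport pandas\n\n# Each row pairs an allele name with a fixed-length pseudosequence: the\n# residues at the selected binding-groove positions (34 positions in the\n# data shown above).\ndf = pandas.read_csv(\"allele_sequences.csv\", names=[\"allele\", \"sequence\"])\nprint(df.sequence.str.len().value_counts())  # expect a single length (34)\nallele_to_sequence = df.set_index(\"allele\").sequence.to_dict()\nprint(allele_to_sequence[\"Mamu-A1:10801\"])\n"
  },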
  {
    "path": "downloads-generation/allele_sequences/filter_sequences.py",
    "content": "\"\"\"\nFilter and combine class I sequence fastas.\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\nimport argparse\n\n\nimport Bio.SeqIO  # pylint: disable=import-error\n\nfrom mhcflurry.common import normalize_allele_name\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"fastas\",\n    nargs=\"+\",\n    help=\"Unaligned fastas\")\n\nparser.add_argument(\n    \"--out\",\n    required=True,\n    help=\"Fasta output\")\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    print(args)\n\n    total = 0\n    order = []\n    name_to_record = {}\n    for fasta in args.fastas:\n        reader = Bio.SeqIO.parse(fasta, \"fasta\")\n        for record in reader:\n            total += 1\n            if len(record.seq) < 50:\n                print(\"-- Skipping '%s', sequence too short\" % (\n                    record.description,))\n                continue\n\n            parts = record.description.split()\n            candidate_strings = [\n                record.description,\n                parts[1],\n                \" \".join(parts[1:])\n            ]\n            if record.description.startswith(\"sp|\"):\n                # From a description like:\n                # sp|P01897|HA1L_MOUSE H-2 class I histocompatibility antigen, L-D alpha chain OS=Mus musculus OX=10090 GN=H2-L PE=1 SV=2\n                # make a string to parse like:\n                # H-2-L-D\n                candidate_strings.insert(0, parts[1] + \"-\" + parts[6])\n            name = None\n            for candidate_string in candidate_strings:\n                name = normalize_allele_name(\n                    candidate_string, raise_on_error=False)\n                if name is not None:\n                    break\n            if name is None:\n                print(\"Skipping '%s'\" % (record.description,))\n                continue\n\n            if '*' not in name:\n                raise ValueError(\"Normalization gave name without a '*' for '%s': %s\" % (\n                    record.description, name))\n\n            print(\"Parsed '%s' as %s\" % (record.description, name))\n            record.description = name + \" \" + record.description\n\n            if name in name_to_record:\n                old_record = name_to_record[name]\n                old_sequence = old_record.seq\n                if len(old_sequence) < len(record.seq):\n                    print(\"-- Replacing old record (%d aa) with new (%d aa)\" % (\n                        len(old_record.seq),\n                        len(record.seq)))\n                    name_to_record[name] = record\n                else:\n                    print(\"-- Skipping, already seen\")\n            else:\n                order.append(name)\n                name_to_record[name] = record\n\n\n    records = [name_to_record[name] for name in order]\n\n    with open(args.out, \"w\") as fd:\n        Bio.SeqIO.write(records, fd, \"fasta\")\n\n    print(\"Wrote %d / %d sequences: %s\" % (len(records), total, args.out))\n\n\nif __name__ == '__main__':\n    run()\n"
  },
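  {
    "path": "downloads-generation/allele_sequences/examples/parse_description_example.py",
    "content": "\"\"\"\nHypothetical usage sketch (not part of the original repository): the\ncandidate-string construction from filter_sequences.py, run on the\nSwissProt-style fasta description cited in that script's comments.\n\"\"\"\ndescription = (\n    \"sp|P01897|HA1L_MOUSE H-2 class I histocompatibility antigen, \"\n    \"L-D alpha chain OS=Mus musculus OX=10090 GN=H2-L PE=1 SV=2\")\nparts = description.split()\n\n# Same ordering as filter_sequences.py: for sp| records, try the\n# species + alpha-chain token first (\"H-2-L-D\" here), then fall back to\n# progressively looser substrings of the description.\ncandidates = [description, parts[1], \" \".join(parts[1:])]\nif description.startswith(\"sp|\"):\n    candidates.insert(0, parts[1] + \"-\" + parts[6])\nprint(candidates[0])  # -> H-2-L-D\n"
  },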
  {
    "path": "downloads-generation/allele_sequences/make_allele_sequences.py",
    "content": "\"\"\"\nGenerate allele sequences for pan-class I models.\n\nAdditional dependency: biopython\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\nimport argparse\n\nimport numpy\nimport pandas\n\nimport Bio.SeqIO  # pylint: disable=import-error\nfrom mhcflurry.common import normalize_allele_name\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"aligned_fasta\",\n    help=\"Aligned sequences\")\n\nparser.add_argument(\n    \"--recapitulate-sequences\",\n    required=True,\n    help=\"CSV giving sequences to recapitulate\")\n\nparser.add_argument(\n    \"--differentiate-alleles\",\n    help=\"File listing alleles to differentiate using additional positions\")\n\nparser.add_argument(\n    \"--out-csv\",\n    help=\"Result file\")\n\ndef normalize_allele_name_optional(s):\n    return normalize_allele_name(s, raise_on_error=False)\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    print(args)\n\n    allele_to_sequence = {}\n    reader = Bio.SeqIO.parse(args.aligned_fasta, \"fasta\")\n    for record in reader:\n        name = record.description.split()[1]\n        print(record.name, record.description)\n        allele_to_sequence[name] = str(record.seq)\n\n    print(\"Read %d aligned sequences\" % len(allele_to_sequence))\n\n    allele_sequences = pandas.Series(allele_to_sequence).to_frame()\n    allele_sequences.columns = ['aligned']\n    allele_sequences['aligned'] = allele_sequences['aligned'].str.replace(\n        \"-\", \"X\")\n\n    allele_sequences['normalized_allele'] = allele_sequences.index.map(\n        normalize_allele_name_optional)\n    allele_sequences = allele_sequences.dropna().set_index(\"normalized_allele\", drop=True)\n\n    selected_positions = []\n\n    recapitulate_df = pandas.read_csv(args.recapitulate_sequences)\n    recapitulate_df[\"normalized_allele\"] = recapitulate_df.allele.map(normalize_allele_name_optional)\n    recapitulate_df = (\n        recapitulate_df\n            .dropna()\n            .drop_duplicates(\"normalized_allele\")\n            .set_index(\"normalized_allele\", drop=True))\n\n    allele_sequences[\"recapitulate_target\"] = recapitulate_df.iloc[:,-1]\n\n    print(\"Sequences in recapitulate CSV that are not in aligned fasta:\")\n    print(recapitulate_df.index[\n        ~recapitulate_df.index.isin(allele_sequences.index)\n    ].tolist())\n\n    allele_sequences_with_target = allele_sequences.loc[\n        ~allele_sequences.recapitulate_target.isnull()\n    ]\n\n    position_identities = []\n    target_length = int(\n        allele_sequences_with_target.recapitulate_target.str.len().max())\n    for i in range(target_length):\n        series_i = allele_sequences_with_target.recapitulate_target.str.get(i)\n        row = []\n        full_length_sequence_length = int(\n            allele_sequences_with_target.aligned.str.len().max())\n        for k in range(full_length_sequence_length):\n            series_k = allele_sequences_with_target.aligned.str.get(k)\n            row.append((series_i == series_k).mean())\n        position_identities.append(row)\n\n    position_identities = pandas.DataFrame(numpy.array(position_identities))\n    selected_positions = position_identities.idxmax(1).tolist()\n    fractions = position_identities.max(1)\n    print(\"Selected positions: \", *selected_positions)\n    print(\"Lowest concordance fraction: %0.5f\" % fractions.min())\n    assert fractions.min() > 0.99\n\n    allele_sequences[\"recapitulated\"] = allele_sequences.aligned.map(\n        
lambda s: \"\".join(s[p] for p in selected_positions))\n\n    allele_sequences_with_target = allele_sequences.loc[\n        ~allele_sequences.recapitulate_target.isnull()\n    ]\n\n    agreement = (\n        allele_sequences_with_target.recapitulated ==\n        allele_sequences_with_target.recapitulate_target).mean()\n\n    print(\"Overall agreement: %0.5f\" % agreement)\n    assert agreement > 0.9\n\n    # Add additional positions\n    additional_positions = []\n    if args.differentiate_alleles:\n        differentiate_alleles = pandas.read_csv(\n            args.differentiate_alleles).iloc[:,0].values\n        print(\n            \"Read %d alleles to differentiate:\" % len(differentiate_alleles),\n            differentiate_alleles)\n\n        to_differentiate = allele_sequences.loc[\n            allele_sequences.index.isin(differentiate_alleles)\n        ].copy()\n        print(to_differentiate.shape)\n\n        additional_positions = []\n\n        # Greedy search, looking ahead 3 positions at a time.\n        possible_additional_positions = set()\n        for (_, sub_df) in to_differentiate.groupby(\"recapitulated\"):\n            if sub_df.aligned.nunique() > 1:\n                differing = pandas.DataFrame(\n                    dict([(pos, chars) for (pos, chars) in\n                    enumerate(zip(*sub_df.aligned.values)) if\n                    any(c != chars[0] for c in chars) and \"X\" not in chars])).T\n                possible_additional_positions.update(differing.index.values)\n\n        def disambiguation_score(sequences):\n            counts = pandas.Series(sequences, copy=False).value_counts()\n            score = -1 * (counts[counts > 1] - 1).sum()\n            return score\n\n        possible_additional_positions = sorted(possible_additional_positions)\n        current_sequences = to_differentiate.recapitulated\n        while current_sequences.value_counts().max() > 1:\n            to_differentiate[\"equivalence_class_size\"] = (\n                current_sequences.map(current_sequences.value_counts())\n            )\n            print(\"Ambiguous alleles\", \" \".join(\n                to_differentiate.loc[\n                    to_differentiate.equivalence_class_size > 1\n                ].index))\n            position1s = []\n            position2s = []\n            position3s = []\n            negative_position1_distances = []\n            possible_additional_positions_scores = []\n            position1_scores = []\n            for position1 in possible_additional_positions:\n                new_sequence1 = (\n                        current_sequences +\n                        to_differentiate.aligned.str.get(position1))\n                negative_position1_distance = -1 * min(\n                    abs(position1 - selected) for selected in selected_positions)\n                position1_score = disambiguation_score(new_sequence1)\n\n                for (i, position2) in enumerate(possible_additional_positions):\n                    new_sequence2 = (\n                        new_sequence1 +\n                        to_differentiate.aligned.str.get(position2))\n                    for position3 in possible_additional_positions:\n                        new_sequence3 = (\n                            new_sequence2 +\n                            to_differentiate.aligned.str.get(position3))\n\n                        score = disambiguation_score(new_sequence3)\n                        position1s.append(position1)\n                        position2s.append(position2)\n        
                position3s.append(position3)\n                        possible_additional_positions_scores.append(score)\n                        negative_position1_distances.append(\n                            negative_position1_distance)\n                        position1_scores.append(position1_score)\n\n            scores_df = pandas.DataFrame({\n                \"position1\": position1s,\n                \"position2\": position2s,\n                \"position3\": position3s,\n                \"negative_position1_distance\": negative_position1_distances,\n                \"tuple_score\": possible_additional_positions_scores,\n                \"position1_score\": position1_scores,\n            }).sort_values(\n                [\"tuple_score\", \"position1_score\", \"negative_position1_distance\"],\n                ascending=False)\n            print(scores_df)\n            selected_additional_position = scores_df.iloc[0].position1\n            print(\"Selected additional position\", selected_additional_position)\n            additional_positions.append(selected_additional_position)\n            current_sequences = (\n                    current_sequences +\n                    to_differentiate.aligned.str.get(\n                        selected_additional_position))\n            possible_additional_positions.remove(selected_additional_position)\n\n    additional_positions = sorted(set(additional_positions))\n    print(\n        \"Selected %d additional positions: \" % len(additional_positions),\n        additional_positions)\n\n    extended_selected_positions = sorted(\n        set(selected_positions).union(set(additional_positions)))\n    print(\n        \"Extended selected positions (%d)\" % len(extended_selected_positions),\n        *extended_selected_positions)\n\n    allele_sequences[\"sequence\"] = allele_sequences.aligned.map(\n        lambda s: \"\".join(s[p] for p in extended_selected_positions))\n\n    allele_sequences[[\"sequence\"]].to_csv(args.out_csv, index=True)\n    print(\"Wrote: %s\" % args.out_csv)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
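  {
    "path": "downloads-generation/allele_sequences/examples/position_matching_example.py",
    "content": "\"\"\"\nHypothetical sketch (not part of the original repository): the\ncolumn-matching step of make_allele_sequences.py on toy data. For each\nposition i of the target pseudosequence, pick the alignment column k\nwhose residues agree with position i across the most alleles.\n\"\"\"\nimport numpy\nimport pandas\n\naligned = pandas.Series(  # toy full-length aligned sequences\n    {\"A*01\": \"YFAMR\", \"A*02\": \"YSAQR\", \"B*07\": \"YSELR\"})\ntarget = pandas.Series(  # toy two-residue pseudosequences to recapitulate\n    {\"A*01\": \"FR\", \"A*02\": \"SR\", \"B*07\": \"SR\"})\n\nidentities = []\nfor i in range(2):  # positions of the target pseudosequence\n    target_i = target.str.get(i)\n    row = [(target_i == aligned.str.get(k)).mean() for k in range(5)]\n    identities.append(row)\n\nidentities = pandas.DataFrame(numpy.array(identities))\nprint(identities.idxmax(1).tolist())  # -> [1, 4]: columns that recapitulate the target\n"
  },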
  {
    "path": "downloads-generation/allele_sequences/select_alleles_to_disambiguate.py",
    "content": "\"\"\"\nSelect alleles to disambiguate\n\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\nimport argparse\n\nimport pandas\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"train_data\",\n    help=\"Path to training data CSV. Must have column: allele\")\nparser.add_argument(\n    \"--min-count\",\n    type=int,\n    metavar=\"N\",\n    help=\"Keep only alleles with at least N measurements\")\nparser.add_argument(\n    \"--out\",\n    help=\"Result file.\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    print(args)\n\n    df = pandas.read_csv(args.train_data)\n    if args.min_count:\n        allele_counts = df.allele.value_counts()\n        df = df.loc[\n            df.allele.map(allele_counts) > args.min_count\n        ]\n\n    df.drop_duplicates(\"allele\").allele.to_csv(\n        args.out, header=False, index=False)\n    print(\"Wrote: \", args.out)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
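  {
    "path": "downloads-generation/allele_sequences/examples/min_count_filter_example.py",
    "content": "\"\"\"\nHypothetical sketch (not part of the original repository): the min-count\nfilter from select_alleles_to_disambiguate.py, inlined on a toy frame.\nAlleles with fewer than N measurements are dropped before the unique\nallele list is written.\n\"\"\"\nimport pandas\n\ndf = pandas.DataFrame({\"allele\": [\"A*01:01\"] * 3 + [\"A*02:01\"]})\nmin_count = 2\n\ncounts = df.allele.value_counts()\nkept = df.loc[df.allele.map(counts) >= min_count]\nprint(kept.drop_duplicates(\"allele\").allele.tolist())  # -> ['A*01:01']\n"
  },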
  {
    "path": "downloads-generation/analysis_predictor_info/GENERATE.sh",
    "content": "#!/bin/bash\n#\n#\n# Usage: GENERATE.sh <local|cluster> <fresh|continue-incomplete>\n#\n# cluster mode uses an HPC cluster (Mount Sinai chimera cluster, which uses lsf job\n# scheduler). This would need to be modified for other sites.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=analysis_predictor_info\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nif [ \"$1\" != \"cluster\" ]\nthen\n    GPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\n    echo \"Detected GPUS: $GPUS\"\n\n    PROCESSORS=$(getconf _NPROCESSORS_ONLN)\n    echo \"Detected processors: $PROCESSORS\"\n\n    if [ \"$GPUS\" -eq \"0\" ]; then\n       NUM_JOBS=${NUM_JOBS-1}\n    else\n        NUM_JOBS=${NUM_JOBS-$GPUS}\n    fi\n    echo \"Num jobs: $NUM_JOBS\"\n    PARALLELISM_ARGS+=\" --num-jobs $NUM_JOBS --max-tasks-per-worker 1 --gpus $GPUS --max-workers-per-gpu 1\"\nelse\n    PARALLELISM_ARGS+=\" --cluster-parallelism --cluster-max-retries 3 --cluster-submit-command bsub --cluster-results-workdir $HOME/mhcflurry-scratch --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.lsf\"\nfi\n\nmkdir -p \"$SCRATCH_DIR\"\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    echo \"Fresh run\"\n    rm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n    mkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nelse\n    echo \"Continuing incomplete run\"\nfi\n\n# Send stdout and stderr to a logfile included with the archive.\nLOG=\"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.$(date +%s).txt\"\nexec >  >(tee -ia \"$LOG\")\nexec 2> >(tee -ia \"$LOG\" >&2)\n\n# Log some environment info\necho \"Invocation: $0 $@\"\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nexport OMP_NUM_THREADS=1\nexport PYTHONUNBUFFERED=1\n\n####### GENERATION OF BINDING MOTIFS AND OTHER ARTIFACTS #######\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"motifs/artifacts.csv\" ]\nthen\n    echo \"Reusing existing artifacts\"\nelse\n    echo \"Using affinity predictor:\"\n    cat \"$(mhcflurry-downloads path models_class1_pan)/models.combined/info.txt\"\n\n    mkdir motifs\n    cp \"$(mhcflurry-downloads path models_class1_pan)/models.combined/info.txt\" motifs/\n\n    cp $SCRIPT_DIR/generate_artifacts.py .\n    time python generate_artifacts.py \\\n        --affinity-predictor \"$(mhcflurry-downloads path models_class1_pan)/models.combined\" \\\n        --out \"$(pwd)/motifs\" \\\n        $PARALLELISM_ARGS\nfi\n\n####### EVALUATION ON MODEL SELECTION DATA #######\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"model_selection_with_decoys.csv.bz2\" ]\nthen\n    echo \"Reusing existing model_selection_with_decoys data\"\nelse\n    echo \"Using affinity predictor:\"\n    cat \"$(mhcflurry-downloads path models_class1_pan)/models.combined/info.txt\"\n\n    cp $SCRIPT_DIR/generate_model_selection_with_decoys.py .\n    time python generate_model_selection_with_decoys.py \\\n        \"$(mhcflurry-downloads path models_class1_pan)/models.combined/model_selection_data.csv.bz2\" \\\n        --proteome-peptides \"$(mhcflurry-downloads path data_references)/uniprot_proteins.csv.bz2\" \\\n        --out \"$(pwd)/model_selection_with_decoys.csv\"\n    bzip2 -f model_selection_with_decoys.csv\nfi\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"model_selection_with_decoys.predictions.selected.csv.bz2\" ]\nthen\n    echo \"Reusing existing 
model_selection_with_decoys.predictions.selected.data\"\nelse\n    echo \"Using affinity predictor:\"\n    cat \"$(mhcflurry-downloads path models_class1_pan)/models.combined/info.txt\"\n\n    cp $SCRIPT_DIR/predict_on_model_selection_data.py .\n    time python predict_on_model_selection_data.py \\\n        \"$(mhcflurry-downloads path models_class1_pan)/models.combined\" \\\n        --data \"$(pwd)/model_selection_with_decoys.csv.bz2\" \\\n        --out \"$(pwd)/model_selection_with_decoys.predictions.selected.csv\" \\\n        $PARALLELISM_ARGS\n    bzip2 -f model_selection_with_decoys.predictions.selected.csv\nfi\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"model_selection_with_decoys.predictions.unselected.csv.bz2\" ]\nthen\n    echo \"Reusing existing model_selection_with_decoys.predictions.unselected data\"\nelse\n    echo \"Using affinity predictor:\"\n    cat \"$(mhcflurry-downloads path models_class1_pan_unselected)/models.unselected.combined/info.txt\"\n\n    cp $SCRIPT_DIR/predict_on_model_selection_data.py .\n    time python predict_on_model_selection_data.py \\\n        \"$(mhcflurry-downloads path models_class1_pan_unselected)/models.unselected.combined\" \\\n        --data \"$(pwd)/model_selection_with_decoys.csv.bz2\" \\\n        --out \"$(pwd)/model_selection_with_decoys.predictions.unselected.csv\" \\\n        $PARALLELISM_ARGS\n    bzip2 -f model_selection_with_decoys.predictions.unselected.csv\nfi\n\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 -f \"$LOG\"\nfor i in $(ls LOG-worker.*.txt) ; do bzip2 -f $i ; done\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n\n# Split into <2GB chunks for GitHub\nPARTS=\"${RESULT}.part.\"\n# Check for pre-existing part files and rename them.\nfor i in $(ls \"${PARTS}\"* )\ndo\n    DEST=\"${i}.OLD.$(date +%s)\"\n    echo \"WARNING: already exists: $i . Moving to $DEST\"\n    mv $i $DEST\ndone\nsplit -b 2000M \"$RESULT\" \"$PARTS\"\necho \"Split into parts:\"\nls -lh \"${PARTS}\"*\n"
  },
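  {
    "path": "downloads-generation/analysis_predictor_info/examples/reassemble_parts_example.py",
    "content": "\"\"\"\nHypothetical sketch (not part of the original repository): reassembling\nthe <2GB *.part.* chunks that GENERATE.sh produces with split(1). This is\nequivalent to `cat archive.tar.bz2.part.* > archive.tar.bz2`. The archive\nname below is made up.\n\"\"\"\nimport glob\nimport shutil\n\narchive = \"analysis_predictor_info.20240101.tar.bz2\"\n\n# split(1) suffixes (aa, ab, ...) sort lexically; skip any *.OLD.* files\n# left behind by GENERATE.sh's renaming of pre-existing parts.\nparts = sorted(\n    p for p in glob.glob(archive + \".part.*\") if \".OLD.\" not in p)\n\nwith open(archive, \"wb\") as out:\n    for part in parts:\n        with open(part, \"rb\") as fd:\n            shutil.copyfileobj(fd, out)\nprint(\"Reassembled %s from %d parts\" % (archive, len(parts)))\n"
  },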
  {
    "path": "downloads-generation/analysis_predictor_info/cluster_submit_script_header.mssm_hpc.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf-{work_item_num} # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q gpu # queue\n#BSUB -R rusage[ngpus_excl_p=1]  # 1 exclusive GPU\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 10:00 # walltime in HH:MM\n#BSUB -R rusage[mem=20000] # mb memory requested\n#BSUB -o {work_dir}/%J.stdout # output log (%J : JobID)\n#BSUB -eo {work_dir}/STDERR # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/.conda/envs/py36b/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\n\nfree -m\n\nmodule add cuda/10.0.130\nmodule list\n\nexport CUDNN_HOME=/hpc/users/odonnt02/oss/cudnn/cuda\nexport LD_LIBRARY_PATH=$CUDNN_HOME/lib64:$LD_LIBRARY_PATH\nexport CMAKE_LIBRARY_PATH=$CUDNN_HOME/lib64:$CMAKE_LIBRARY_PATH\nexport INCLUDE_PATH=$CUDNN_HOME/include:$INCLUDE_PATH\nexport C_INCLUDE_PATH=$CUDNN_HOME/include:$C_INCLUDE_PATH\nexport CPLUS_INCLUDE_PATH=$CUDNN_HOME/include:$CPLUS_INCLUDE_PATH\nexport CMAKE_INCLUDE_PATH=$CUDNN_HOME/include:$CMAKE_INCLUDE_PATH\n\npython -c 'import tensorflow as tf ; print(\"GPU AVAILABLE\" if tf.test.is_gpu_available() else \"GPU NOT AVAILABLE\")'\n\nenv\n\ncd {work_dir}\n\n"
  },
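  {
    "path": "downloads-generation/analysis_predictor_info/examples/render_lsf_header_example.py",
    "content": "\"\"\"\nHypothetical sketch (not part of the original repository): filling the\n{work_item_num} and {work_dir} placeholders in the LSF header above. The\nuse of str.format-style substitution is an assumption inferred from the\nbrace syntax; see mhcflurry.cluster_parallelism for the authoritative\nrendering logic.\n\"\"\"\nwith open(\"cluster_submit_script_header.mssm_hpc.lsf\") as fd:\n    template = fd.read()\n\n# The header contains no other literal braces, so str.format is safe.\nrendered = template.format(\n    work_item_num=0, work_dir=\"/hpc/scratch/mhcflurry/work.0\")\nprint(rendered.splitlines()[1])  # -> #BSUB -J MHCf-0 # Job name\n"
  },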
  {
    "path": "downloads-generation/analysis_predictor_info/generate_artifacts.py",
    "content": "\"\"\"\nGenerate images for MHC binding motifs.\n\nNote: a shared filesystem is assumed even when running on an HPC cluster.\nThe --out directory should be on an NFS filesystem and available to the workers.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport numpy\nimport time\nimport collections\nfrom functools import partial\n\nimport pandas\nimport tqdm\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\nfrom mhcflurry.common import configure_logging\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom mhcflurry.cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\n\nparser.add_argument(\n    \"--affinity-predictor\",\n    metavar=\"DIR\",\n    help=\"Pan-allele class I affinity predictor\")\nparser.add_argument(\n    \"--frequency-matrices\",\n    metavar=\"CSV\",\n    help=\"Frequency matrices\")\nparser.add_argument(\n    \"--length-distributions\",\n    metavar=\"CSV\",\n    help=\"Length distributions\")\nparser.add_argument(\n    \"--train-data\",\n    metavar=\"CSV\",\n    help=\"Training data\")\nparser.add_argument(\n    \"--alleles\",\n    nargs=\"+\",\n    help=\"Alleles to process. If not specified all alleles are used\")\nparser.add_argument(\n    \"--max-alleles\",\n    type=int,\n    help=\"Max number of allelels to process. 
For debugging.\")\nparser.add_argument(\n    \"--chunk-size\",\n    type=int,\n    default=100,\n    help=\"Number of alleles per job\")\nparser.add_argument(\n    \"--logo-lengths\",\n    type=int,\n    nargs=\"+\",\n    default=[8, 9, 10, 11],\n    help=\"Peptide lengths for motif logos\")\nparser.add_argument(\n    \"--length-distribution-lengths\",\n    nargs=\"+\",\n    default=[8, 9, 10, 11, 12, 13, 14, 15],\n    type=int,\n    help=\"Peptide lengths for length distribution plots\",\n)\nparser.add_argument(\n    \"--logo-cutoff\",\n    default=0.01,\n    type=float,\n    help=\"Fraction of top to use for motifs\",\n)\nparser.add_argument(\n    \"--length-cutoff\",\n    default=0.01,\n    type=float,\n    help=\"Fraction of top to use for length distribution\",\n)\nparser.add_argument(\n    \"--out\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write results to\")\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\ndef run():\n    from mhcflurry.amino_acid import COMMON_AMINO_ACIDS\n\n    args = parser.parse_args(sys.argv[1:])\n\n    configure_logging()\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n\n    if not args.affinity_predictor:\n        args.affinity_predictor = get_path(\n            \"models_class1_pan\", \"models.combined\")\n        print(\"Using downloaded affinity predictor: \", args.affinity_predictor)\n\n    if not args.frequency_matrices:\n        args.frequency_matrices = os.path.join(\n            args.affinity_predictor, \"frequency_matrices.csv.bz2\")\n\n    if not args.length_distributions:\n        args.length_distributions = os.path.join(args.affinity_predictor,\n            \"length_distributions.csv.bz2\")\n\n    if not args.train_data:\n        args.train_data = os.path.join(args.affinity_predictor,\n            \"train_data.csv.bz2\")\n\n    frequency_matrices_df = pandas.read_csv(args.frequency_matrices)\n    length_distributions = pandas.read_csv(args.length_distributions)\n    train_data = pandas.read_csv(args.train_data)\n\n    alleles = args.alleles\n    if alleles:\n        print(\"Using specified alleles, \", *alleles)\n    else:\n        alleles = frequency_matrices_df.allele.unique()\n\n    if args.max_alleles:\n        alleles = alleles[:args.max_alleles]\n\n    print(\"Using %d alleles\" % len(alleles), alleles)\n\n    amino_acids = sorted(COMMON_AMINO_ACIDS)\n\n    distribution = frequency_matrices_df.loc[\n        (frequency_matrices_df.cutoff_fraction == 1.0), amino_acids\n    ].mean(0)\n\n    normalized_frequency_matrices = frequency_matrices_df.copy()\n    normalized_frequency_matrices.loc[:, amino_acids] = (\n            normalized_frequency_matrices[amino_acids] / distribution)\n\n    GLOBAL_DATA[\"args\"] = args\n    GLOBAL_DATA[\"normalized_frequency_matrices\"] = normalized_frequency_matrices\n    GLOBAL_DATA[\"length_distributions\"] = length_distributions\n    GLOBAL_DATA[\"train_data\"] = train_data\n\n    artifacts_out = os.path.join(args.out, \"artifacts\")\n\n    if not os.path.exists(args.out):\n        os.mkdir(args.out)\n\n    if not os.path.exists(artifacts_out):\n        os.mkdir(artifacts_out)\n\n    tasks = [\n        {\n            \"task_num\": i,\n            \"allele\": allele,\n            \"out_dir\": artifacts_out,\n        }\n        for (i, allele) in enumerate(alleles)\n    ]\n\n    jobs = []\n    for task in tasks:\n        if not jobs or len(jobs[-1]['tasks']) >= args.chunk_size:\n            jobs.append({'tasks': []})\n        
jobs[-1]['tasks'].append(task)\n\n    print(\"Generated %d tasks, packed into %d jobs\" % (len(tasks), len(jobs)))\n\n    worker_pool = None\n    start = time.time()\n\n    if serial_run:\n        # Serial run\n        print(\"Running in serial.\")\n        results = (\n            do_job(**job) for job in jobs)\n    elif args.cluster_parallelism:\n        # Run using separate processes HPC cluster.\n        print(\"Running on cluster.\")\n        results = cluster_results_from_args(\n            args,\n            work_function=do_job,\n            work_items=jobs,\n            constant_data=GLOBAL_DATA,\n            input_serialization_method=\"dill\",\n            result_serialization_method=\"pickle\",\n            clear_constant_data=False)\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n\n        for task in tasks:\n            task['constant_data'] = GLOBAL_DATA\n\n        results = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, do_job),\n            jobs,\n            chunksize=1)\n\n    print(\"Reading results\")\n\n    task_results = {}\n\n    for job_result in tqdm.tqdm(results, total=len(jobs)):\n        for task_result in job_result:\n            task_results[task_result['task_num']] = task_result\n\n    print(\"Received all results in %0.2f sec\" % (time.time() - start))\n\n    artifacts_df = pandas.DataFrame(task_results).T.set_index(\"task_num\")\n\n    length_distributions_out = os.path.join(args.out,\n        \"length_distributions.csv\")\n    length_distributions.to_csv(length_distributions_out,\n        index=False)\n    print(\"Wrote: \", length_distributions_out)\n\n    artifacts_summary_out = os.path.join(args.out, \"artifacts.csv\")\n    artifacts_df.to_csv(artifacts_summary_out)\n    print(\"Wrote: \", artifacts_summary_out)\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n\ndef do_job(tasks, constant_data=GLOBAL_DATA):\n    # Nested functions are so that the do_job function can be pickled for\n    # running on an HPC cluster.\n    GLOBAL_DATA = constant_data\n\n    def do_task(task_num, allele, out_dir, constant_data=GLOBAL_DATA):\n        args = constant_data['args']\n        normalized_frequency_matrices = constant_data[\n            'normalized_frequency_matrices'\n        ]\n        length_distributions = constant_data[\n            'length_distributions'\n        ]\n        train_data = constant_data[\n            'train_data'\n        ]\n\n        logo_filename = write_logo(\n            normalized_frequency_matrices,\n            allele=allele,\n            lengths=args.logo_lengths,\n            cutoff=args.logo_cutoff,\n            models_label=\"standard\",\n            out_dir=out_dir,\n        )\n\n        length_distribution_filename = write_length_distribution(\n            length_distributions,\n            allele=allele,\n            lengths=args.length_distribution_lengths,\n            cutoff=args.length_cutoff,\n            models_label=\"standard\",\n            out_dir=out_dir)\n\n        (train_data_filename, num_train_points) = write_train_data(\n            train_data,\n            allele=allele,\n            models_label=\"standard\",\n            out_dir=out_dir)\n\n        return {\n            'task_num': task_num,\n            'allele': allele,\n            'logo_filename': logo_filename,\n            'length_distribution_filename': 
length_distribution_filename,\n            'train_data_filename': train_data_filename,\n            'num_train_points': num_train_points,\n        }\n\n\n    def write_logo(\n            normalized_frequency_matrices,\n            allele,\n            lengths,\n            cutoff,\n            models_label,\n            out_dir):\n\n        import seaborn\n        from matplotlib import pyplot\n        import logomaker\n        import os\n        from mhcflurry.amino_acid import COMMON_AMINO_ACIDS\n\n        amino_acids = sorted(COMMON_AMINO_ACIDS)\n\n        fig = pyplot.figure(figsize=(8,10))\n\n        for (i, length) in enumerate(lengths):\n            ax = pyplot.subplot(len(lengths), 1, i + 1)\n            matrix = normalized_frequency_matrices.loc[\n                (normalized_frequency_matrices.allele == allele) &\n                (normalized_frequency_matrices.length == length) &\n                (normalized_frequency_matrices.cutoff_fraction == cutoff)\n            ].set_index(\"position\")[amino_acids]\n            if matrix.shape[0] == 0:\n                return None\n\n            matrix = (matrix.T / matrix.sum(1)).T  # row normalize\n\n            ss_logo = logomaker.Logo(\n                matrix,\n                color_scheme=\"NajafabadiEtAl2017\",\n                font_name=\"Arial\",\n                width=.8,\n                vpad=.05,\n                fade_probabilities=True,\n                stack_order='small_on_top',\n                ax=ax,\n            )\n            pyplot.title(\n                \"%s %d-mer\" % (allele, length), y=0.85)\n            pyplot.xticks(matrix.index.values)\n            seaborn.despine()\n\n        pyplot.tight_layout()\n        name = \"%s.motifs.%s.png\" % (\n            allele.replace(\"*\", \"-\").replace(\":\", \"-\"), models_label)\n        filename = os.path.abspath(os.path.join(out_dir, name))\n        pyplot.savefig(filename)\n        print(\"Wrote: \", filename)\n        fig.clear()\n        pyplot.close(fig)\n        return name\n\n\n    def write_length_distribution(\n            length_distributions_df, allele, lengths, cutoff, models_label, out_dir):\n\n        from matplotlib import pyplot\n        import seaborn\n        import os\n\n        length_distribution = length_distributions_df.loc[\n            (length_distributions_df.allele == allele) &\n            (length_distributions_df.cutoff_fraction == cutoff)\n        ]\n        if length_distribution.shape[0] == 0:\n            return None\n\n        length_distribution = length_distribution.set_index(\n            \"length\").reindex(lengths).fillna(0.0).reset_index()\n\n        length_distribution.plot(\n            x=\"length\", y=\"fraction\", kind=\"bar\", figsize=(5, 3))\n        fig = pyplot.gcf()\n        pyplot.title(\"%s\" % allele, fontsize=10)\n        pyplot.xlabel(\"Peptide length\", fontsize=10)\n        pyplot.xticks(rotation=0)\n        pyplot.ylim(ymin=0, ymax=1.0)\n        pyplot.ylabel(\"Fraction of top %0.1f%%\" % (cutoff * 100.0), fontsize=10)\n        pyplot.gca().get_legend().remove()\n        pyplot.tight_layout()\n\n        seaborn.despine()\n\n        name = \"%s.lengths.%s.png\" % (\n            allele.replace(\"*\", \"-\").replace(\":\", \"-\"), models_label)\n\n        filename = os.path.abspath(os.path.join(out_dir, name))\n        pyplot.savefig(filename)\n        print(\"Wrote: \", filename)\n        fig.clear()\n        pyplot.close(fig)\n        return name\n\n    def write_train_data(train_data, allele, models_label, out_dir):\n    
    import os\n        sub_train = train_data.loc[\n            train_data.allele == allele\n        ]\n\n        name = None\n        if sub_train.shape[0] > 0:\n            name = \"%s.train_data.%s.csv\" % (\n                allele.replace(\"*\", \"-\").replace(\":\", \"-\"), models_label)\n            filename = os.path.abspath(os.path.join(out_dir, name))\n            sub_train.to_csv(filename, index=False)\n            print(\"Wrote: \", filename)\n        return (name, len(sub_train))\n\n    return [do_task(constant_data=constant_data, **task) for task in tasks]\n\nif __name__ == '__main__':\n    run()\n"
  },
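The end of this script packs per-allele tasks into jobs of at most `--chunk-size` tasks, so each worker invocation amortizes process startup over a whole chunk. A minimal, self-contained sketch of that packing (the `make_jobs` helper name is ours, not part of mhcflurry):

```python
# Sketch of the task -> job chunking used in the script above: append tasks
# to the last job until it reaches chunk_size, then start a new job.
def make_jobs(tasks, chunk_size):
    jobs = []
    for task in tasks:
        if not jobs or len(jobs[-1]["tasks"]) >= chunk_size:
            jobs.append({"tasks": []})
        jobs[-1]["tasks"].append(task)
    return jobs

tasks = [
    {"task_num": i, "allele": allele}
    for (i, allele) in enumerate(["HLA-A*02:01", "HLA-B*07:02", "HLA-C*07:02"])
]
# Three tasks with chunk_size=2 pack into jobs of sizes [2, 1].
assert [len(job["tasks"]) for job in make_jobs(tasks, 2)] == [2, 1]
```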
  {
    "path": "downloads-generation/analysis_predictor_info/generate_model_selection_with_decoys.py",
    "content": "\"\"\"\nFrom affinity predictor model selection data, add decoys so that AUCs can be\ncalculated per-allele.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport numpy\nimport math\nimport collections\n\nimport pandas\nimport tqdm\n\nimport mhcflurry\nfrom mhcflurry.downloads import get_path\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"data\",\n    metavar=\"CSV\",\n    help=\"Model selection data\")\nparser.add_argument(\n    \"--proteome-peptides\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Proteome peptides\")\nparser.add_argument(\n    \"--protein-data\",\n    metavar=\"CSV\",\n    default=get_path(\"data_references\", \"uniprot_proteins.csv.bz2\", test_exists=False),\n    help=\"Proteome data. Default: %(default)s.\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"File to write\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    data_df = pandas.read_csv(args.data)\n    print(\"Read\", args.data, len(data_df))\n    print(data_df)\n\n    fold_cols = [col for col in data_df.columns if col.startswith(\"fold_\")]\n    print(\"Fold cols\", fold_cols)\n    assert len(fold_cols) > 1\n\n    eval_df = data_df.loc[\n        data_df[fold_cols].sum(1) < len(fold_cols)\n    ].copy()\n\n    eval_df[\"binder\"] = (eval_df.measurement_inequality != '>') & (\n        eval_df.measurement_value <= 500)\n\n    print(\"Reduced to data held-out at least once: \", len(eval_df))\n    print(\"Binder rate per allele:\")\n    print(eval_df.groupby(\"allele\").binder.mean())\n\n    decoy_universe = pandas.read_csv(args.protein_data, usecols=[\"seq\"])\n    decoy_universe = pandas.Series(decoy_universe.seq.unique())\n    decoy_universe = decoy_universe.loc[\n        decoy_universe.str.match(\"^[%s]+$\" % \"\".join(\n            mhcflurry.amino_acid.COMMON_AMINO_ACIDS)) & (\n            decoy_universe.str.len() >= 50)\n    ]\n    print(\"Read decoy universe from\", args.protein_data)\n    print(decoy_universe)\n\n    def make_decoys(num, length):\n        return decoy_universe.sample(num, replace=True).map(\n            lambda s: s[numpy.random.randint(0, len(s) - length):][:length]).values\n\n    lengths = [8,9,10,11]\n\n    pieces = []\n    real_df = eval_df.loc[\n        eval_df.peptide.str.len().isin(lengths)].copy()\n    real_df[\"synthetic\"] = False\n    pieces.append(real_df)\n\n    for length in lengths:\n        decoys_df = real_df.loc[real_df.binder].copy()\n        decoys_df.binder = False\n        decoys_df.measurement_value = numpy.nan\n        decoys_df.synthetic = True\n        decoys_df[\"peptide\"] = make_decoys(len(decoys_df), length)\n        pieces.append(decoys_df)\n\n    result_df = pandas.concat(pieces, ignore_index=True)\n\n    print(\"Final binder rate per allele:\")\n    print(result_df.groupby(\"allele\").binder.mean())\n\n    result_df.to_csv(args.out, index=False)\n    print(\"Wrote: \", args.out)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
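The decoy generation here samples source proteins with replacement and cuts a random fixed-length substring from each. A runnable toy version of that sampling, with placeholder sequences standing in for the UniProt-derived decoy universe:

```python
import numpy
import pandas

# Minimal sketch of make_decoys from generate_model_selection_with_decoys.py.
# The two toy sequences below are hypothetical stand-ins for the filtered
# proteome (the real script requires length >= 50 and valid amino acids).
numpy.random.seed(0)
decoy_universe = pandas.Series([
    "MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ",
    "MSLLTEVETPIRNEWGCRCNDSSDPLVVAASII",
])

def make_decoys(num, length):
    # Mirrors the script: draw proteins with replacement, then take a random
    # start (numpy.random.randint excludes the high endpoint, as in the
    # original) and slice out a length-mer.
    return decoy_universe.sample(num, replace=True).map(
        lambda s: s[numpy.random.randint(0, len(s) - length):][:length]).values

print(make_decoys(3, 9))  # three random 9-mer decoys
```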
  {
    "path": "downloads-generation/analysis_predictor_info/predict_on_model_selection_data.py",
    "content": "\"\"\"\nEvaluate affinity predictor on its held-out model selection data, using only\nthe individual models that were not trained on each particular data point.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport numpy\nimport time\nimport collections\nfrom functools import partial\n\nimport pandas\nimport tqdm\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\nfrom mhcflurry.common import configure_logging\nfrom mhcflurry.local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom mhcflurry.cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"predictor\",\n    metavar=\"DIR\",\n    help=\"Class 1 affinity predictor to use\")\nparser.add_argument(\n    \"--data\",\n    metavar=\"CSV\",\n    help=\"Model selection data. If not specified will guess based on affinity \"\n    \"predictor path\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"File to write with predictions\")\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\ndef do_predict(predictor, key, sub_df, constant_data=None):\n    import tqdm\n    tqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n    prediction = predictor.predict(sub_df.peptide, sub_df.allele, throw=False)\n    return {\n        \"key\": key,\n        \"index\": sub_df.index,\n        \"prediction\": prediction,\n    }\n\n\ndef run():\n    import mhcflurry\n\n    args = parser.parse_args(sys.argv[1:])\n\n    configure_logging()\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n\n    if not args.data:\n        args.data = os.path.join(args.predictor, 'model_selection_data.csv.bz2')\n        print(\"Defaulting data to: \", args.data)\n\n    data_df = pandas.read_csv(args.data)\n    print(\"Read %d rows:\" % len(data_df))\n    print(data_df)\n\n    fold_cols = [col for col in data_df.columns if col.startswith(\"fold_\")]\n    print(\"Fold cols\", fold_cols)\n    assert len(fold_cols) > 1\n\n    eval_df = data_df.loc[\n        data_df[fold_cols].sum(1) < len(fold_cols)\n    ].copy()\n\n    print(\"Reduced to data held-out at least once: \", len(eval_df))\n\n    predictor = mhcflurry.Class1AffinityPredictor.load(\n        args.predictor, optimization_level=0)\n    print(\"Loaded predictor\", predictor)\n\n    fold_to_ensemble = collections.defaultdict(list)\n    for n in predictor.neural_networks:\n        fold = n.fit_info[-1]['training_info']['fold_num']\n        fold_to_ensemble[fold].append(n)\n    print(\"Constructed fold_to_ensemble\", fold_to_ensemble)\n\n    eval_df[\"ensemble_key\"] = (\n        (~eval_df[fold_cols]).astype(str) + \"_\"\n    ).sum(1).str.strip(\"_\")\n    print(\"Established ensemble keys:\")\n    print(eval_df.ensemble_key.value_counts())\n\n    def predictor_for_ensemble_key(key_string):\n        indicators = [eval(s) for s in key_string.split(\"_\")]\n        ensemble = []\n        for fold, indicator in 
enumerate(indicators):\n            if indicator:\n                ensemble.extend(fold_to_ensemble[fold])\n        pred = mhcflurry.Class1AffinityPredictor(\n            class1_pan_allele_models=ensemble,\n            allele_to_sequence=predictor.allele_to_sequence)\n        return pred\n\n    tasks = []\n    for (key, sub_df) in eval_df.groupby(\"ensemble_key\"):\n        print(key)\n        pred = predictor_for_ensemble_key(key)\n        assert len(pred.neural_networks) > 0\n        eval_df.loc[\n            sub_df.index,\n            \"ensemble_size\"\n        ] = len(pred.neural_networks)\n        tasks.append({\n            \"key\": key,\n            \"predictor\": pred,\n            \"sub_df\": sub_df[[\"peptide\", \"allele\"]].copy()\n        })\n\n    worker_pool = None\n    start = time.time()\n\n    if serial_run:\n        # Serial run\n        print(\"Running in serial.\")\n        results = (\n            do_predict(**task) for task in tasks)\n    elif args.cluster_parallelism:\n        # Run using separate processes HPC cluster.\n        print(\"Running on cluster.\")\n        results = cluster_results_from_args(\n            args,\n            work_function=do_predict,\n            work_items=tasks,\n            constant_data=GLOBAL_DATA,\n            input_serialization_method=\"dill\",\n            result_serialization_method=\"pickle\",\n            clear_constant_data=False)\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n        results = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, do_predict),\n            tasks,\n            chunksize=1)\n\n    print(\"Reading results\")\n\n    for worker_result in tqdm.tqdm(results, total=len(tasks)):\n        print(\"Received worker result:\", worker_result['key'])\n        print(worker_result)\n\n        eval_df.loc[\n            worker_result['index'],\n            \"prediction\"\n        ] = worker_result[\"prediction\"]\n\n    print(\"Received all results in %0.2f sec\" % (time.time() - start))\n\n    eval_df.to_csv(args.out, index=False)\n    print(\"Wrote: \", args.out)\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n\nif __name__ == '__main__':\n    run()\n"
  },
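The core trick in this script is the per-row ensemble key: each row's `fold_*` flags record which training folds included it, the key encodes the complement (folds that held it out), and predictions for that row use only models from those folds. A small sketch of the key construction and lookup, with model objects stubbed as strings and the script's `eval` replaced by an explicit string comparison:

```python
import pandas

# Hypothetical fold -> trained-model mapping; in the script this is built
# from each network's fit_info.
fold_to_ensemble = {0: ["net0a", "net0b"], 1: ["net1a"], 2: ["net2a"]}

df = pandas.DataFrame({
    "fold_0": [True, False],   # True = this row was in fold 0's training set
    "fold_1": [False, True],
    "fold_2": [True, True],
})
fold_cols = ["fold_0", "fold_1", "fold_2"]

# Negate the flags so "True" marks held-out folds, then join into a key
# like "False_True_False", exactly as in the script.
df["ensemble_key"] = ((~df[fold_cols]).astype(str) + "_").sum(1).str.strip("_")

def ensemble_for_key(key_string):
    indicators = [s == "True" for s in key_string.split("_")]
    ensemble = []
    for (fold, held_out) in enumerate(indicators):
        if held_out:  # only models that never saw this row
            ensemble.extend(fold_to_ensemble[fold])
    return ensemble

for key in df.ensemble_key.unique():
    print(key, ensemble_for_key(key))
```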
  {
    "path": "downloads-generation/analysis_predictor_info/requirements.txt",
    "content": "logomaker\nseaborn"
  },

  {
    "path": "downloads-generation/data_curated/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Create \"curated\" training data, which combines an IEDB download with additional\n# published data, removes unusable entries, normalizes allele name, and performs\n# other filtering and standardization.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=data_curated\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\nexport PYTHONUNBUFFERED=1\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\ncp $SCRIPT_DIR/curate.py .\ncp $SCRIPT_DIR/curate_ms_by_pmid.py .\n\nMS_DIR=\"$(mhcflurry-downloads path data_published)/ms\"\ncp -r \"$MS_DIR\" .\n\nEXPRESSION_DIR=\"$(mhcflurry-downloads path data_published)/expression\"\ncp -r \"$EXPRESSION_DIR\" .\n\nCURATE_BY_PMID_ARGS=\"\"\nfor pmid in $(ls ms)\ndo\n    CURATE_BY_PMID_ARGS+=$(echo --ms-item $pmid ms/$pmid/* ' ')\ndone\nfor item in $(ls expression)\ndo\n    CURATE_BY_PMID_ARGS+=$(echo --expression-item $item expression/$item/* ' ')\ndone\n\ntime python curate_ms_by_pmid.py $CURATE_BY_PMID_ARGS \\\n    --ms-out ms.by_pmid.csv \\\n    --expression-out rna_expression.csv \\\n    --expression-metadata-out rna_expression.metadata.csv\n\nbzip2 ms.by_pmid.csv\nbzip2 rna_expression.csv\n\nrm -rf ms\n\ntime python curate.py \\\n    --data-iedb \\\n        \"$(mhcflurry-downloads path data_iedb)/mhc_ligand_full.csv.bz2\" \\\n    --data-kim2014 \\\n        \"$(mhcflurry-downloads path data_published)/bdata.20130222.mhci.public.1.txt\" \\\n    --data-systemhc-atlas \\\n        \"$(mhcflurry-downloads path data_systemhcatlas)/data.csv.bz2\" \\\n    --data-additional-ms \"$(pwd)/ms.by_pmid.csv.bz2\" \\\n    --out-csv curated_training_data.csv \\\n    --out-affinity-csv curated_training_data.affinity.csv \\\n    --out-mass-spec-csv curated_training_data.mass_spec.csv\n\ntime python curate.py \\\n    --data-iedb \\\n        \"$(mhcflurry-downloads path data_iedb)/mhc_ligand_full.csv.bz2\" \\\n    --data-kim2014 \\\n        \"$(mhcflurry-downloads path data_published)/bdata.20130222.mhci.public.1.txt\" \\\n    --data-systemhc-atlas \\\n        \"$(mhcflurry-downloads path data_systemhcatlas)/data.csv.bz2\" \\\n    --out-csv curated_training_data.no_additional_ms.csv\n\nfor i in $(ls *.csv)\ndo\n    bzip2 $i\ndone\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n"
  },
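The trickiest part of GENERATE.sh is assembling the `curate_ms_by_pmid.py` command line: one `--ms-item PMID FILE...` group per subdirectory of `ms/` and one `--expression-item LABEL FILE...` group per subdirectory of `expression/`. A sketch of the same assembly in Python (the directory layout and `item_args` helper are illustrative, not part of the repo):

```python
import os

def item_args(flag, root):
    # For each subdirectory of root, emit: flag, its name, then its files,
    # matching the shell loop that builds CURATE_BY_PMID_ARGS.
    args = []
    for item in sorted(os.listdir(root)):
        files = sorted(
            os.path.join(root, item, f)
            for f in os.listdir(os.path.join(root, item)))
        args.extend([flag, item] + files)
    return args

# e.g. item_args("--ms-item", "ms") might yield (hypothetical layout):
# ["--ms-item", "27600516", "ms/27600516/hits.csv", ...]
```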
  {
    "path": "downloads-generation/data_curated/README.md",
    "content": "# Combined training data\n\nThis download contains the data used to train the production class1 MHCflurry models. This data is derived from a recent [IEDB](http://www.iedb.org/home_v3.php) export as well as the data from [Kim 2014](http://bmcbioinformatics.biomedcentral.com/articles/10.1186/1471-2105-15-241), as well as a number of other sources.\n\nTo generate this download run:\n\n```\npip install -r requirements.txt  # for the first time you generate this download\n./GENERATE.sh\n```\n"
  },
  {
    "path": "downloads-generation/data_curated/curate.py",
    "content": "\"\"\"\nFilter and combine various peptide/MHC datasets to derive a composite training set,\noptionally including eluted peptides identified by mass-spec.\n\"\"\"\nimport sys\nimport os\nimport argparse\n\nimport pandas\nimport numpy\n\nfrom mhcflurry.common import normalize_allele_name\n\n\ndef normalize_allele_name_or_return_unknown(s):\n    if s is numpy.nan:\n        return \"UNKNOWN\"\n    return normalize_allele_name(\n        s,\n        raise_on_error=False,\n        default_value=\"UNKNOWN\")\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data-kim2014\",\n    action=\"append\",\n    default=[],\n    help=\"Path to Kim 2014-style affinity data\")\nparser.add_argument(\n    \"--data-iedb\",\n    action=\"append\",\n    default=[],\n    help=\"Path to IEDB-style affinity data (e.g. mhc_ligand_full.csv)\")\nparser.add_argument(\n    \"--data-additional-ms\",\n    action=\"append\",\n    default=[],\n    help=\"Path to additional monoallelic mass spec hits\")\nparser.add_argument(\n    \"--data-systemhc-atlas\",\n    action=\"append\",\n    default=[],\n    help=\"Path to systemhc-atlas-style mass-spec data\")\n\nparser.add_argument(\n    \"--out-csv\",\n    required=True,\n    help=\"Combined result file\")\nparser.add_argument(\n    \"--out-affinity-csv\",\n    required=False,\n    help=\"Result file\")\nparser.add_argument(\n    \"--out-mass-spec-csv\",\n    required=False,\n    help=\"Result file\")\n\n# Note: the value assigned to \"Positive\" entries in the dict below is the\n# default that will be used for mass spec hits when training the predictor.\n# However, as of version 2.0.0 we are reassigning MS entries in the training\n# script for the pan-allele predictor (see\n# downloads-generation/models_class1_pan/GENERATE.sh where\n# reassign_mass_spec_training_data.py is called). 
So you should look there to\n# see what the current setting for this value is.\nQUALITATIVE_TO_AFFINITY_AND_INEQUALITY = {\n    \"Negative\": (5000.0, \">\"),\n    \"Positive\": (100.0, \"<\"),  # used for mass-spec hits but see note above\n    \"Positive-High\": (100.0, \"<\"),\n    \"Positive-Intermediate\": (1000.0, \"<\"),\n    \"Positive-Low\": (5000.0, \"<\"),\n}\nQUALITATIVE_TO_AFFINITY = dict(\n    (key, value[0]) for (key, value)\n    in QUALITATIVE_TO_AFFINITY_AND_INEQUALITY.items())\nQUALITATIVE_TO_INEQUALITY = dict(\n    (key, value[1]) for (key, value)\n    in QUALITATIVE_TO_AFFINITY_AND_INEQUALITY.items())\n\n\nEXCLUDE_IEDB_ALLELES = [\n    \"HLA class I\",\n    \"HLA class II\",\n]\n\n\ndef load_data_kim2014(filename):\n    df = pandas.read_table(filename)\n    print(\"Loaded kim2014 data: %s\" % str(df.shape))\n    df[\"measurement_source\"] = \"kim2014\"\n    df[\"measurement_kind\"] = \"affinity\"\n    df[\"measurement_value\"] = df.meas\n    df[\"measurement_type\"] = (df.inequality == \"=\").map({\n        True: \"quantitative\",\n        False: \"qualitative\",\n    })\n    df[\"measurement_inequality\"] = df.inequality\n    df[\"original_allele\"] = df.mhc\n    df[\"peptide\"] = df.sequence\n    df[\"allele\"] = df.mhc.map(normalize_allele_name_or_return_unknown)\n    print(\"Dropping un-parseable alleles: %s\" % \", \".join(\n        df.loc[df.allele == \"UNKNOWN\"][\"mhc\"].unique()))\n    df = df.loc[df.allele != \"UNKNOWN\"]\n\n    print(\"Loaded kim2014 data: %s\" % str(df.shape))\n    return df\n\n\ndef load_data_systemhc_atlas(filename, min_probability=0.99):\n    df = pandas.read_csv(filename)\n    print(\"Loaded systemhc atlas data: %s\" % str(df.shape))\n\n    df[\"measurement_kind\"] = \"mass_spec\"\n    df[\"measurement_source\"] = \"systemhc-atlas\"\n    df[\"measurement_value\"] = QUALITATIVE_TO_AFFINITY[\"Positive\"]\n    df[\"measurement_inequality\"] = \"<\"\n    df[\"measurement_type\"] = \"qualitative\"\n    df[\"original_allele\"] = df.top_allele\n    df[\"peptide\"] = df.search_hit\n    df[\"allele\"] = df.top_allele.map(normalize_allele_name_or_return_unknown)\n\n    print(\"Dropping un-parseable alleles: %s\" % \", \".join(\n        str(x) for x in df.loc[df.allele == \"UNKNOWN\"][\"top_allele\"].unique()))\n    df = df.loc[df.allele != \"UNKNOWN\"]\n    print(\"Systemhc atlas data now: %s\" % str(df.shape))\n\n    print(\"Dropping data points with probability < %f\" % min_probability)\n    df = df.loc[df.prob >= min_probability]\n    print(\"Systemhc atlas data now: %s\" % str(df.shape))\n\n    print(\"Removing duplicates\")\n    df = df.drop_duplicates([\"allele\", \"peptide\"])\n    print(\"Systemhc atlas data now: %s\" % str(df.shape))\n\n    return df\n\n\ndef load_data_iedb(iedb_csv, include_qualitative=True):\n    iedb_df = pandas.read_csv(iedb_csv, skiprows=1, low_memory=False)\n    print(\"Loaded iedb data: %s\" % str(iedb_df.shape))\n\n    print(\"Selecting only class I\")\n    iedb_df = iedb_df.loc[\n        # Old naming:\n        # iedb_df[\"MHC allele class\"].str.strip().str.upper() == \"I\"\n        # New names IEDB has adopted:\n        iedb_df[\"Class\"].str.strip().str.upper() == \"I\"\n    ]\n    print(\"New shape: %s\" % str(iedb_df.shape))\n\n    iedb_df[\"Allele Name\"] = iedb_df[\"Name.6\"]\n\n    print(\"Dropping known unusable alleles\")\n    iedb_df = iedb_df.loc[\n        ~iedb_df[\"Allele Name\"].isin(EXCLUDE_IEDB_ALLELES)\n    ]\n    iedb_df = iedb_df.loc[\n        (~iedb_df[\"Allele Name\"].str.contains(\"mutant\")) 
&\n        (~iedb_df[\"Allele Name\"].str.contains(\"CD1\"))\n    ]\n\n    # Drop insufficiently specific allele names like \"HLA-A03\":\n    insuffient_mask = (\n        (~iedb_df[\"Allele Name\"].str.upper().str.startswith(\"H2-\")) &\n        (~iedb_df[\"Allele Name\"].str.upper().str.startswith(\"H-2-\")) &\n        (~iedb_df[\"Allele Name\"].str.upper().str.startswith(\"MAMU\")) &\n        (iedb_df[\"Allele Name\"].str.findall(\"[0-9]\").str.len() < 4)\n    )\n    print(\"Dropping %d records with insufficiently-specific allele names:\" %\n        insuffient_mask.sum())\n    print(iedb_df.loc[insuffient_mask][\"Allele Name\"].value_counts())\n    iedb_df = iedb_df.loc[~insuffient_mask]\n\n    iedb_df[\"allele\"] = iedb_df[\"Allele Name\"].map(\n        normalize_allele_name_or_return_unknown)\n    print(\"Dropping un-parseable alleles: %s\" % \", \".join(\n        iedb_df.loc[iedb_df.allele == \"UNKNOWN\"][\"Allele Name\"].unique()))\n    iedb_df = iedb_df.loc[iedb_df.allele != \"UNKNOWN\"]\n\n    print(\"IEDB measurements per allele:\\n%s\" % iedb_df.allele.value_counts())\n\n    quantitative = iedb_df.loc[iedb_df[\"Units\"] == \"nM\"].copy()\n    quantitative[\"measurement_kind\"] = \"affinity\"\n    quantitative[\"measurement_type\"] = \"quantitative\"\n    quantitative[\"measurement_inequality\"] = quantitative[\n        \"Measurement Inequality\"\n    ].fillna(\"=\").map(lambda s: {\">=\": \">\", \"<=\": \"<\"}.get(s, s))\n    print(\"Quantitative measurements: %d\" % len(quantitative))\n\n    qualitative = iedb_df.loc[iedb_df[\"Units\"].isnull()].copy()\n    qualitative[\"measurement_type\"] = \"qualitative\"\n    qualitative[\"measurement_kind\"] = qualitative[\n        \"Method\"\n    ].str.contains(\"mass spec\").map({\n        True: \"mass_spec\",\n        False: \"affinity\",\n    })\n    print(\"Qualitative measurements: %d\" % len(qualitative))\n\n    qualitative[\"Quantitative measurement\"] = (\n        qualitative[\"Qualitative Measurement\"].map(QUALITATIVE_TO_AFFINITY))\n    qualitative[\"measurement_inequality\"] = (\n        qualitative[\"Qualitative Measurement\"].map(QUALITATIVE_TO_INEQUALITY))\n\n    print(\"Qualitative measurements (possibly after dropping MS): %d\" % (\n        len(qualitative)))\n\n    iedb_df = pandas.concat(\n        (\n            ([quantitative]) +\n            ([qualitative] if include_qualitative else [])),\n        ignore_index=True)\n\n    print(\"IEDB measurements per allele:\\n%s\" % iedb_df.allele.value_counts())\n\n    print(\"Subselecting to valid peptides. 
Starting with: %d\" % len(iedb_df))\n    iedb_df[\"Description\"] = iedb_df.Name.str.strip()\n    iedb_df = iedb_df.loc[\n        iedb_df.Description.str.match(\"^[ACDEFGHIKLMNPQRSTVWY]+$\")\n    ]\n    print(\"Now: %d\" % len(iedb_df))\n\n    print(\"Annotating last author and category\")\n    iedb_df[\"last_author\"] = iedb_df.Authors.map(\n        lambda x: (\n            x.split(\";\")[-1]\n            .split(\",\")[-1]\n            .split(\" \")[-1]\n            .strip()\n            .replace(\"*\", \"\"))).values\n    iedb_df[\"category\"] = (\n        iedb_df[\"last_author\"] + \" - \" + iedb_df[\"Method\"]).values\n\n    train_data = pandas.DataFrame()\n    train_data[\"peptide\"] = iedb_df.Description.values\n    train_data[\"measurement_value\"] = iedb_df[\n        \"Quantitative measurement\"\n    ].values\n    train_data[\"measurement_source\"] = iedb_df.category.values\n    train_data[\"measurement_inequality\"] = iedb_df.measurement_inequality.values\n\n    train_data[\"allele\"] = iedb_df[\"allele\"].values\n    train_data[\"original_allele\"] = iedb_df[\"Allele Name\"].values\n    train_data[\"measurement_type\"] = iedb_df[\"measurement_type\"].values\n    train_data[\"measurement_kind\"] = iedb_df[\"measurement_kind\"].values\n    train_data = train_data.drop_duplicates().reset_index(drop=True)\n\n    return train_data\n\n\ndef load_data_additional_ms(filename):\n    df = pandas.read_csv(filename)\n    print(\"Loaded additional MS\", filename, df.shape)\n    print(df)\n    print(\"Entries:\", len(df))\n\n    print(\"Subselecting to monoallelic\")\n    df = df.loc[\n        df.format == \"MONOALLELIC\"\n    ].copy()\n    print(\"Now\", len(df))\n\n    df[\"allele\"] = df[\"hla\"].map(normalize_allele_name_or_return_unknown)\n    assert not (df.allele == \"UNKNOWN\").any(), (\n        list(df.loc[df.allele == \"UNKNOWN\"].hla.unique()))\n    df[\"measurement_value\"] = QUALITATIVE_TO_AFFINITY[\"Positive\"]\n    df[\"measurement_inequality\"] = \"<\"\n    df[\"measurement_type\"] = \"qualitative\"\n    df[\"measurement_kind\"] = \"mass_spec\"\n    df[\"measurement_source\"] = \"MS:pmid:\" + df[\"original_pmid\"].map(str)\n    df[\"original_allele\"] = \"\"\n    return df\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    dfs = []\n    for filename in args.data_iedb:\n        df = load_data_iedb(filename)\n        dfs.append(df)\n    for filename in args.data_kim2014:\n        df = load_data_kim2014(filename)\n        df[\"allele_peptide\"] = df.allele + \"_\" + df.peptide\n\n        # Give precedence to IEDB data.\n        if dfs:\n            iedb_df = dfs[0]\n            iedb_df[\"allele_peptide\"] = iedb_df.allele + \"_\" + iedb_df.peptide\n            print(\"Dropping kim2014 data present in IEDB.\")\n            df = df.loc[\n                ~df.allele_peptide.isin(iedb_df.allele_peptide)\n            ]\n            print(\"Kim2014 data now: %s\" % str(df.shape))\n        dfs.append(df)\n    for filename in args.data_systemhc_atlas:\n        df = load_data_systemhc_atlas(filename)\n        dfs.append(df)\n\n    for filename in args.data_additional_ms:\n        df = load_data_additional_ms(filename)\n        dfs.append(df)\n\n    df = pandas.concat(dfs, ignore_index=True)\n    print(\"Combined df: %s\" % (str(df.shape)))\n\n    print(\"Removing combined duplicates\")\n    df = df.drop_duplicates(\n        [\"allele\", \"peptide\", \"measurement_value\", \"measurement_kind\"])\n    print(\"New combined df: %s\" % (str(df.shape)))\n\n    df = df[[\n        
\"allele\",\n        \"peptide\",\n        \"measurement_value\",\n        \"measurement_inequality\",\n        \"measurement_type\",\n        \"measurement_kind\",\n        \"measurement_source\",\n        \"original_allele\",\n    ]].sort_values([\"allele\", \"peptide\"]).dropna()\n\n    print(\"Final combined df: %s\" % (str(df.shape)))\n\n    print(\"Measurement sources:\")\n    print(df.measurement_source.value_counts())\n\n    print(\"Measurement kind:\")\n    print(df.measurement_kind.value_counts())\n\n    print(\"Measurement source / kind:\")\n    print(\n        df.groupby(\n            [\"measurement_source\", \"measurement_kind\"]\n        ).peptide.count().sort_values())\n\n    def write(write_df, filename):\n        filename = os.path.abspath(filename)\n        write_df.to_csv(filename, index=False)\n        print(\"Wrote [%d lines]: %s\" % (len(write_df), filename))\n\n    write(df, args.out_csv)\n    if args.out_affinity_csv:\n        write(\n            df.loc[df.measurement_kind == \"affinity\"],\n            args.out_affinity_csv)\n    if args.out_mass_spec_csv:\n        write(\n            df.loc[df.measurement_kind == \"mass_spec\"],\n            args.out_mass_spec_csv)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
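A small worked example of the qualitative-to-affinity conversion in curate.py: each IEDB qualitative label maps to a default nM affinity plus a measurement inequality (the values below mirror `QUALITATIVE_TO_AFFINITY_AND_INEQUALITY` in the script; note the caveat there that mass-spec "Positive" entries are later reassigned during pan-allele training):

```python
import pandas

QUALITATIVE_TO_AFFINITY_AND_INEQUALITY = {
    "Negative": (5000.0, ">"),
    "Positive": (100.0, "<"),
    "Positive-High": (100.0, "<"),
    "Positive-Intermediate": (1000.0, "<"),
    "Positive-Low": (5000.0, "<"),
}

labels = pandas.Series(["Positive-High", "Negative", "Positive-Low"])
print(pandas.DataFrame({
    "label": labels,
    "measurement_value": labels.map(
        lambda k: QUALITATIVE_TO_AFFINITY_AND_INEQUALITY[k][0]),
    "measurement_inequality": labels.map(
        lambda k: QUALITATIVE_TO_AFFINITY_AND_INEQUALITY[k][1]),
}))
```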
  {
    "path": "downloads-generation/data_curated/curate_ms_by_pmid.py",
    "content": "\"\"\"\nFilter and combine various peptide/MHC datasets to derive a composite training set,\noptionally including eluted peptides identified by mass-spec.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport json\nimport collections\nfrom six.moves import StringIO\n\nimport pandas\n\nfrom mhcflurry.common import normalize_allele_name\n\n\ndef normalize_allele_name_or_return_unknown(s):\n    return normalize_allele_name(\n        s,\n        raise_on_error=False,\n        default_value=\"UNKNOWN\")\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--ms-item\",\n    nargs=\"+\",\n    action=\"append\",\n    metavar=\"PMID FILE, ... FILE\",\n    default=[],\n    help=\"Mass spec item to curate: PMID and list of files\")\nparser.add_argument(\n    \"--expression-item\",\n    nargs=\"+\",\n    action=\"append\",\n    metavar=\"LABEL FILE, ... FILE\",\n    default=[],\n    help=\"Expression data to curate: dataset label and list of files\")\nparser.add_argument(\n    \"--ms-out\",\n    metavar=\"OUT.csv\",\n    help=\"Out file path (MS data)\")\nparser.add_argument(\n    \"--expression-out\",\n    metavar=\"OUT.csv\",\n    help=\"Out file path (RNA-seq expression)\")\nparser.add_argument(\n    \"--expression-metadata-out\",\n    metavar=\"OUT.csv\",\n    help=\"Out file path for expression metadata, i.e. which samples used\")\nparser.add_argument(\n    \"--debug\",\n    action=\"store_true\",\n    default=False,\n    help=\"Leave user in pdb if PMID is unsupported\")\n\nPMID_HANDLERS = {}\nEXPRESSION_HANDLERS = {}\n\ndef load(filenames, **kwargs):\n    result = {}\n    for filename in filenames:\n        if filename.endswith(\".csv\"):\n            result[filename] = pandas.read_csv(filename, **kwargs)\n        elif filename.endswith(\".xlsx\") or filename.endswith(\".xls\"):\n            result[filename] = pandas.read_excel(filename, **kwargs)\n        else:\n            result[filename] = filename\n\n    return result\n\n\ndef debug(*filenames):\n    loaded = load(filenames)\n    import ipdb\n    ipdb.set_trace()\n\n\ndef handle_pmid_27600516(filename):\n    \"\"\"Gloger, ..., Neri Cancer Immunol Immunother 2016 [PMID 27600516]\"\"\"\n    df = pandas.read_csv(filename)\n\n    sample_to_peptides = {}\n    current_sample = None\n    for peptide in df.peptide:\n        if peptide.startswith(\"#\"):\n            current_sample = peptide[1:]\n            sample_to_peptides[current_sample] = []\n        else:\n            assert current_sample is not None\n            sample_to_peptides[current_sample].append(peptide.strip().upper())\n\n    rows = []\n    for (sample, peptides) in sample_to_peptides.items():\n        for peptide in sorted(set(peptides)):\n            rows.append([sample, peptide])\n\n    result_df = pandas.DataFrame(rows, columns=[\"sample_id\", \"peptide\"])\n    result_df[\"sample_type\"] = \"melanoma_cell_line\"\n    result_df[\"cell_line\"] = result_df.sample_id\n    result_df[\"mhc_class\"] = \"I\"\n    result_df[\"pulldown_antibody\"] = \"W6/32\"\n    result_df[\"format\"] = \"multiallelic\"\n    result_df[\"hla\"] = result_df.sample_id.map({\n        \"FM-82\": \"HLA-A*02:01 HLA-A*01:01 HLA-B*08:01 HLA-B*15:01 HLA-C*03:04 HLA-C*07:01\",\n        \"FM-93/2\": \"HLA-A*02:01 HLA-A*26:01 HLA-B*40:01 HLA-B*44:02 HLA-C*03:04 HLA-C*05:01\",\n        \"Mel-624\": \"HLA-A*02:01 HLA-A*03:01 HLA-B*07:02 HLA-B*14:01 HLA-C*07:02 HLA-C*08:02\",\n        \"MeWo\": \"HLA-A*02:01 HLA-A*26:01 HLA-B*14:02 HLA-B*38:01 HLA-C*08:02 
HLA-C*12:03\",\n        \"SK-Mel-5\": \"HLA-A*02:01 HLA-A*11:01 HLA-B*40:01 HLA-C*03:03\",\n    })\n    return result_df\n\n\ndef handle_pmid_23481700(filename):\n    \"\"\"Hassan, ..., van Veelen Mol Cell Proteomics 2015 [PMID 23481700]\"\"\"\n    df = pandas.read_excel(filename, skiprows=10)\n    assert df[\"Peptide sequence\"].iloc[0] == \"TPSLVKSTSQL\"\n    assert df[\"Peptide sequence\"].iloc[-1] == \"LPHSVNSKL\"\n\n    hla = {\n        \"JY\": \"HLA-A*02:01 HLA-B*07:02 HLA-C*07:02\",\n        \"HHC\": \"HLA-A*02:01 HLA-B*07:02 HLA-B*44:02 HLA-C*05:01 HLA-C*07:02\",\n    }\n\n    results = []\n    for sample_id in [\"JY\", \"HHC\"]:\n        hits_df = df.loc[\n            df[\"Int %s\" % sample_id].map(\n                lambda x: {\"n.q.\": 0, \"n.q\": 0}.get(x, x)).astype(float) > 0\n        ]\n        result_df = pandas.DataFrame({\n            \"peptide\": hits_df[\"Peptide sequence\"].dropna().values,\n        })\n        result_df[\"sample_id\"] = sample_id\n        result_df[\"cell_line\"] = \"B-LCL-\" + sample_id\n        result_df[\"hla\"] = hla[sample_id]\n        result_df[\"sample_type\"] = \"B-LCL\"\n        result_df[\"mhc_class\"] = \"I\"\n        result_df[\"format\"] = \"multiallelic\"\n        result_df[\"pulldown_antibody\"] = \"W6/32\"\n        results.append(result_df)\n\n    result_df = pandas.concat(results, ignore_index=True)\n\n    # Rename samples to avoid a collision with the JY sample in PMID 25576301.\n    result_df.sample_id = result_df.sample_id.map({\n        \"JY\": \"JY.2015\",\n        \"HHC\": \"HHC.2015\",\n    })\n    return result_df\n\n\ndef handle_pmid_24616531(filename):\n    \"\"\"Mommen, ..., Heck PNAS 2014 [PMID 24616531]\"\"\"\n    df = pandas.read_excel(filename, sheet_name=\"EThcD\")\n    peptides = df.Sequence.values\n    assert peptides[0] == \"APFLRIAF\"\n    assert peptides[-1] == \"WRQAGLSYIRYSQI\"\n\n    result_df = pandas.DataFrame({\n        \"peptide\": peptides,\n    })\n    result_df[\"sample_id\"] = \"24616531\"\n    result_df[\"sample_type\"] = \"B-LCL\"\n    result_df[\"cell_line\"] = \"GR\"\n    result_df[\"pulldown_antibody\"] = \"W6/32\"\n\n    # Note: this publication lists hla as \"HLA-A*01,-03, B*07,-27, and -C*02,-07\"\n    # we are guessing the exact 4 digit alleles based on this.\n    result_df[\"hla\"] = \"HLA-A*01:01 HLA-A*03:01 HLA-B*07:02 HLA-B*27:05 HLA-C*02:02 HLA-C*07:01\"\n    result_df[\"mhc_class\"] = \"I\"\n    result_df[\"format\"] = \"multiallelic\"\n    return result_df\n\n\ndef handle_pmid_25576301(filename):\n    \"\"\"Bassani-Sternberg, ..., Mann Mol Cell Proteomics 2015 [PMID 25576301]\"\"\"\n    df = pandas.read_excel(filename, sheet_name=\"Peptides\")\n    peptides = df.Sequence.values   \n    assert peptides[0] == \"AAAAAAAQSVY\"\n    assert peptides[-1] == \"YYYNGKAVY\"\n\n    column_to_sample = {}\n    for s in [c for c in df if c.startswith(\"Intensity \")]:\n        assert s[-2] == \"-\"\n        column_to_sample[s] = s.replace(\"Intensity \", \"\")[:-2].strip()\n\n    intensity_columns = list(column_to_sample)\n\n    rows = []\n    for _, row in df.iterrows():\n        x1 = row[intensity_columns]\n        x2 = x1[x1 > 0].index.map(column_to_sample).value_counts()\n        x3 = x2[x2 >= 2]  # require at least two replicates for each peptide\n        for sample in x3.index:\n            rows.append((row.Sequence, sample))\n\n    result_df = pandas.DataFrame(rows, columns=[\"peptide\", \"sample_id\"])\n    result_df[\"pulldown_antibody\"] = \"W6/32\"\n    result_df[\"mhc_class\"] = \"I\"\n    
result_df[\"format\"] = \"multiallelic\"\n\n    allele_map = {\n        'Fib': \"HLA-A*03:01 HLA-A*23:01 HLA-B*08:01 HLA-B*15:18 HLA-C*07:02 HLA-C*07:04\",\n        'HCC1937': \"HLA-A*23:01 HLA-A*24:02 HLA-B*07:02 HLA-B*40:01 HLA-C*03:04 HLA-C*07:02\",\n        'SupB15WT': None,  # four digit alleles unknown, will drop sample\n        'SupB15RT': None,\n        'HCT116': \"HLA-A*01:01 HLA-A*02:01 HLA-B*45:01 HLA-B*18:01 HLA-C*05:01 HLA-C*07:01\",\n\n        # Homozygous at HLA-A:\n        'HCC1143': \"HLA-A*31:01 HLA-A*31:01 HLA-B*35:08 HLA-B*37:01 HLA-C*04:01 HLA-C*06:02\",\n\n        # Homozygous everywhere:\n        'JY': \"HLA-A*02:01 HLA-A*02:01 HLA-B*07:02 HLA-B*07:02 HLA-C*07:02 HLA-C*07:02\",\n    }\n\n    sample_type = {\n        'Fib': \"fibroblast\",\n        'HCC1937': \"basal like breast cancer\",\n        'SupB15WT': None,\n        'SupB15RT': None,\n        'HCT116': \"colon carcinoma\",\n        'HCC1143': \"basal like breast cancer\",\n        'JY': \"B-cell\",\n    }\n    cell_line = {\n        'Fib': None,\n        'HCC1937': \"HCC1937\",\n        'SupB15WT': None,\n        'SupB15RT': None,\n        'HCT116': \"HCT116\",\n        'HCC1143': \"HCC1143\",\n        'JY': \"JY\",\n    }\n    result_df[\"hla\"] = result_df.sample_id.map(allele_map)\n    print(\"Entries before dropping samples with unknown alleles\", len(result_df))\n    result_df = result_df.loc[~result_df.hla.isnull()]\n    print(\"Entries after dropping samples with unknown alleles\", len(result_df))\n    result_df[\"sample_type\"] = result_df.sample_id.map(sample_type)\n    result_df[\"cell_line\"] = result_df.sample_id.map(cell_line)\n    print(result_df.head(3))\n    return result_df\n\n\ndef handle_pmid_26992070(*filenames):\n    \"\"\"Ritz, ..., Fugmann Proteomics 2016 [PMID 26992070]\"\"\"\n    # Although this publication seems to suggest that HEK293 are C*07:02\n    # (figure 3B), in a subsequent publication [PMID 28834231] this group\n    # gives the HEK293 HLA type as HLA‐A*03:01, HLA‐B*07:02, and HLA‐C*07:01.\n    # We are therefore using the HLA‐C*07:01 (i.e. 
the latter) typing results\n    # here.\n    allele_text = \"\"\"\n        Cell line\tHLA-A 1\tHLA-A 2\tHLA-B 1\tHLA-B 2\tHLA-C 1\tHLA-C 2\n        HEK293\t03:01\t03:01\t07:02\t07:02\t07:01\t07:01\n        HL-60\t01:01\t01:01\t57:01\t57:01\t06:02\t06:02\n        RPMI8226\t30:01\t68:02\t15:03\t15:10\t02:10\t03:04\n        MAVER-1\t24:02\t26:01\t38:01\t44:02\t05:01\t12:03\n        THP-1\t02:01\t24:02\t15:11\t35:01\t03:03\t03:03\n    \"\"\"\n    allele_info = pandas.read_csv(\n        StringIO(allele_text), sep=\"\\t\", index_col=0)\n    allele_info.index = allele_info.index.str.strip()\n    for gene in [\"A\", \"B\", \"C\"]:\n        for num in [\"1\", \"2\"]:\n            allele_info[\n                \"HLA-%s %s\" % (gene, num)\n            ] = \"HLA-\" + gene + \"*\" + allele_info[\"HLA-%s %s\" % (gene, num)]\n    cell_line_to_allele = allele_info.apply(\" \".join, axis=1)\n\n    sheets = {}\n    for f in filenames:\n        if f.endswith(\".xlsx\"):\n            d = pandas.read_excel(f, sheet_name=None, skiprows=1)\n            sheets.update(d)\n\n    dfs = []\n    for cell_line in cell_line_to_allele.index:\n        # Using data from DeepQuanTR, which appears to be a consensus between\n        # two other methods used.\n        sheet = sheets[cell_line + \"_DeepQuanTR\"]\n        replicated = sheet.loc[\n            sheet[[c for c in sheet if \"Sample\" in c]].fillna(0).sum(1) > 1\n        ]\n        df = pandas.DataFrame({\n            'peptide': replicated.Sequence.values\n        })\n        df[\"sample_id\"] = cell_line\n        df[\"hla\"] = cell_line_to_allele.get(cell_line)\n        dfs.append(df)\n\n    result_df = pandas.concat(dfs, ignore_index=True)\n    result_df[\"pulldown_antibody\"] = \"W6/32\"\n    result_df[\"cell_line\"] = result_df[\"sample_id\"]\n    result_df[\"sample_type\"] = result_df.sample_id.map({\n        \"HEK293\": \"hek\",\n        \"HL-60\": \"neutrophil\",\n        \"RPMI8226\": \"b-cell\",\n        \"MAVER-1\": \"b-LCL\",\n        \"THP-1\": \"monocyte\",\n    })\n    result_df[\"mhc_class\"] = \"I\"\n    result_df[\"format\"] = \"multiallelic\"\n    return result_df\n\n\ndef handle_pmid_27412690(filename):\n    \"\"\"Shraibman, ..., Admon Mol Cell Proteomics 2016 [PMID 27412690]\"\"\"\n    hla_types = {\n        \"U-87\": \"HLA-A*02:01 HLA-B*44:02 HLA-C*05:01\",\n        \"T98G\": \"HLA-A*02:01 HLA-B*39:06 HLA-C*07:02\",\n        \"LNT-229\": \"HLA-A*03:01 HLA-B*35:01 HLA-C*04:01\",\n    }\n    sample_id_to_cell_line = {\n        \"U-87\": \"U-87\",\n        \"T98G\": \"T98G\",\n        \"LNT-229\": \"LNT-229\",\n        \"U-87+DAC\": \"U-87\",\n        \"T98G+DAC\": \"T98G\",\n        \"LNT-229+DAC\": \"LNT-229\",\n    }\n\n    df = pandas.read_excel(filename)\n    assert df.Sequence.iloc[0] == \"AAAAAAGSGTPR\"\n\n    intensity_col_to_sample_id = {}\n    for col in df:\n        if col.startswith(\"Intensity \"):\n            sample_id = col.split()[1]\n            assert sample_id in sample_id_to_cell_line, (col, sample_id)\n            intensity_col_to_sample_id[col] = sample_id\n\n    dfs = []\n    for (sample_id, cell_line) in sample_id_to_cell_line.items():\n        intensity_cols = [\n            c for (c, v) in intensity_col_to_sample_id.items()\n            if v == sample_id\n        ]\n        hits_df = df.loc[\n            (df[intensity_cols] > 0).sum(1) > 1\n        ]\n        result_df = pandas.DataFrame({\n            \"peptide\": hits_df.Sequence.values,\n        })\n        result_df[\"sample_id\"] = sample_id\n        
result_df[\"cell_line\"] = cell_line\n        result_df[\"hla\"] = hla_types[cell_line]\n\n        dfs.append(result_df)\n\n    result_df = pandas.concat(dfs, ignore_index=True)\n    result_df[\"sample_type\"] = \"glioblastoma\"\n    result_df[\"pulldown_antibody\"] = \"W6/32\"\n    result_df[\"mhc_class\"] = \"I\"\n    result_df[\"format\"] = \"multiallelic\"\n    return result_df\n\n\ndef handle_pmid_28832583(*filenames):\n    \"\"\"Bassani-Sternberg, ..., Gfeller PLOS Comp. Bio. 2017 [PMID 28832583]\"\"\"\n    # This work also reanalyzes data from\n    # Pearson, ..., Perreault J Clin Invest 2016 [PMID 27841757]\n\n    (filename_dataset1, filename_dataset2) = sorted(filenames)\n\n    dataset1 = pandas.read_csv(filename_dataset1, sep=\"\\t\")\n    dataset2 = pandas.read_csv(filename_dataset2, sep=\"\\t\")\n    df = pandas.concat([dataset1, dataset2], ignore_index=True, sort=False)\n\n    info_text = \"\"\"\n    cell_line\torigin\toriginal_pmid\tallele1\tallele2\tallele3\tallele4\tallele5\tallele6\n    CD165\tB-cell\t28832583\tHLA-A*02:05\tHLA-A*24:02\tHLA-B*15:01\tHLA-B*50:01\tHLA-C*03:03\tHLA-C*06:02\n    CM467\tB-cell\t28832583\tHLA-A*01:01\tHLA-A*24:02\tHLA-B*13:02\tHLA-B*39:06\tHLA-C*06:02\tHLA-C*12:03\n    GD149\tB-cell\t28832583\tHLA-A*01:01\tHLA-A*24:02\tHLA-B*38:01\tHLA-B*44:03\tHLA-C*06:02\tHLA-C*12:03\n    MD155\tB-cell\t28832583\tHLA-A*02:01\tHLA-A*24:02\tHLA-B*15:01\tHLA-B*18:01\tHLA-C*03:03\tHLA-C*07:01\n    PD42\tB-cell\t28832583\tHLA-A*02:06\tHLA-A*24:02\tHLA-B*07:02\tHLA-B*55:01\tHLA-C*01:02\tHLA-C*07:02\n    RA957\tB-cell\t28832583\tHLA-A*02:20\tHLA-A*68:01\tHLA-B*35:03\tHLA-B*39:01\tHLA-C*04:01\tHLA-C*07:02\n    TIL1\tTIL\t28832583\tHLA-A*02:01\tHLA-A*02:01\tHLA-B*18:01\tHLA-B*38:01\tHLA-C*05:01\t\n    TIL3\tTIL\t28832583\tHLA-A*01:01\tHLA-A*23:01\tHLA-B*07:02\tHLA-B*15:01\tHLA-C*12:03\tHLA-C*14:02\n    Apher1\tLeukapheresis\t28832583\tHLA-A*03:01\tHLA-A*29:02\tHLA-B*44:02\tHLA-B*44:03\tHLA-C*12:03\tHLA-C*16:01\n    Apher6\tLeukapheresis\t28832583\tHLA-A*02:01\tHLA-A*03:01\tHLA-B*07:02\t\tHLA-C*07:02\t\n    pat_AC2\tB-LCL\t27841757\tHLA-A*03:01\tHLA-A*32:01\tHLA-B*27:05\tHLA-B*45:01\t\t\n    pat_C\tB-LCL\t27841757\tHLA-A*02:01\tHLA-A*03:01\tHLA-B*07:02\t\tHLA-C*07:02\t\n    pat_CELG\tB-LCL\t27841757\tHLA-A*02:01\tHLA-A*24:02\tHLA-B*15:01\tHLA-B*73:01\tHLA-C*03:03\tHLA-C*15:05\n    pat_CP2\tB-LCL\t27841757\tHLA-A*11:01\t\tHLA-B*14:02\tHLA-B*44:02\t\t\n    pat_FL\tB-LCL\t27841757\tHLA-A*03:01\tHLA-A*11:01\tHLA-B*44:03\tHLA-B*50:01\t\t\n    pat_J\tB-LCL\t27841757\tHLA-A*02:01\tHLA-A*03:01\tHLA-B*07:02\t\tHLA-C*07:02\t\n    pat_JPB3\tB-LCL\t27841757\tHLA-A*02:01\tHLA-A*11:01\tHLA-B*27:05\tHLA-B*56:01\t\t\n    pat_JT2\tB-LCL\t27841757\tHLA-A*11:01\t\tHLA-B*18:03\tHLA-B*35:01\t\t\n    pat_M\tB-LCL\t27841757\tHLA-A*03:01\tHLA-A*29:02\tHLA-B*08:01\tHLA-B*44:03\tHLA-C*07:01\tHLA-C*16:01\n    pat_MA\tB-LCL\t27841757\tHLA-A*02:01\tHLA-A*29:02\tHLA-B*44:03\tHLA-B*57:01\tHLA-C*07:01\tHLA-C*16:01\n    pat_ML\tB-LCL\t27841757\tHLA-A*02:01\tHLA-A*11:01\tHLA-B*40:01\tHLA-B*44:03\t\t\n    pat_NS2\tB-LCL\t27841757\tHLA-A*02:01\t\tHLA-B*13:02\tHLA-B*41:01\t\t\n    pat_NT\tB-LCL\t27841757\tHLA-A*01:01\tHLA-A*32:01\tHLA-B*08:01\t\t\t\n    pat_PF1\tB-LCL\t27841757\tHLA-A*01:01\tHLA-A*02:01\tHLA-B*07:02\tHLA-B*44:03\tHLA-C*07:02\tHLA-C*16:01\n    pat_R\tB-LCL\t27841757\tHLA-A*03:01\tHLA-A*29:02\tHLA-B*08:01\tHLA-B*44:03\tHLA-C*07:01\tHLA-C*16:01\n    pat_RT\tB-LCL\t27841757\tHLA-A*01:01\tHLA-A*02:01\tHLA-B*18:01\tHLA-B*39:24\tHLA-C*05:01\tHLA-C*07:01\n    
pat_SR\tB-LCL\t27841757\tHLA-A*02:01\tHLA-A*23:01\tHLA-B*18:01\tHLA-B*44:03\t\t\n    pat_ST\tB-LCL\t27841757\tHLA-A*03:01\tHLA-A*24:02\tHLA-B*07:02\tHLA-B*27:05\n    \"\"\"\n    info_df = pandas.read_csv(StringIO(info_text), sep=\"\\t\", index_col=0)\n    info_df.index = info_df.index.str.strip()\n\n    info_df[\"hla\"] = info_df[\n        [c for c in info_df if c.startswith(\"allele\")]\n    ].fillna(\"\").apply(\" \".join, axis=1)\n\n    results = []\n    for col in df.columns:\n        if col.startswith(\"Intensity \"):\n            sample_id = col.replace(\"Intensity \", \"\")\n            assert sample_id in info_df.index, sample_id\n            peptides = df.loc[df[col].fillna(0) > 0].Sequence.unique()\n            result_df = pandas.DataFrame({\"peptide\": peptides})\n            result_df[\"sample_id\"] = sample_id\n            result_df[\"hla\"] = info_df.loc[sample_id].hla\n            result_df[\"sample_type\"] = info_df.loc[sample_id].origin\n            result_df[\"original_pmid\"] = str(\n                info_df.loc[sample_id].original_pmid)\n            results.append(result_df)\n\n    result_df = pandas.concat(results, ignore_index=True)\n    samples = result_df.sample_id.unique()\n    for sample_id in info_df.index:\n        assert sample_id in samples, (sample_id, samples)\n\n    result_df[\"mhc_class\"] = \"I\"\n    result_df[\"format\"] = \"multiallelic\"\n    result_df[\"cell_line\"] = \"\"\n    result_df[\"pulldown_antibody\"] = \"W6/32\"\n    return result_df\n\n\nPMID_31495665_SAMPLE_TYPES = {\n        \"HLA-DR_Lung\": \"lung\",\n        \"HLA-DR_PBMC_HDSC\": \"pbmc\",\n        \"HLA-DR_PBMC_RG1095\": \"pbmc\",\n        \"HLA-DR_PBMC_RG1104\": \"pbmc\",\n        \"HLA-DR_PBMC_RG1248\": \"pbmc\",\n        \"HLA-DR_Spleen\": \"spleen\",\n        \"MAPTAC_A*02:01\": \"mix:a375,expi293,hek293,hela\",\n        \"MAPTAC_A*11:01\": \"mix:expi293,hela\",\n        \"MAPTAC_A*32:01\": \"mix:a375,expi293,hela\",\n        \"MAPTAC_B*07:02\": \"mix:a375,expi293,hela\",\n        \"MAPTAC_B*45:01\": \"expi293\",\n        \"MAPTAC_B*52:01\": \"mix:a375,expi293\",\n        \"MAPTAC_C*03:03\": \"expi293\",\n        \"MAPTAC_C*06:02\": \"mix:a375,expi293\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm+\": \"expi293\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm-\": \"expi293\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm+\": \"expi293\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm-\": \"expi293\",\n        \"MAPTAC_DRB1*01:01\": \"mix:a375,b721,expi293,kg1,k562\",\n        \"MAPTAC_DRB1*03:01\": \"expi293\",\n        \"MAPTAC_DRB1*04:01\": \"expi293\",\n        \"MAPTAC_DRB1*07:01\": \"mix:expi293,hek293\",\n        \"MAPTAC_DRB1*11:01\": \"mix:expi293,k562,kg1\",\n        \"MAPTAC_DRB1*12:01_dm+\": \"expi293\",\n        \"MAPTAC_DRB1*12:01_dm-\": \"expi293\",\n        \"MAPTAC_DRB1*15:01\": \"expi293\",\n        \"MAPTAC_DRB3*01:01_dm+\": \"expi293\",\n        \"MAPTAC_DRB3*01:01_dm-\": \"expi293\",\n}\nCELL_LINE_MIXTURES = sorted(\n    set(\n        x for x in PMID_31495665_SAMPLE_TYPES.values()\n        if x.startswith(\"mix:\")))\n\n\ndef handle_pmid_31495665(filename):\n    \"\"\"Abelin, ..., Rooney Immunity 2019 [PMID 31495665]\"\"\"\n    hla_type = {\n        \"HLA-DR_A375\": None,\n        \"HLA-DR_Lung\": \"DRB1*01:01 DRB1*03:01 DRB3*01:01\",\n        \"HLA-DR_PBMC_HDSC\": \"DRB1*03:01 DRB1*11:01 DRB3*01:01 DRB3*02:02\",\n        \"HLA-DR_PBMC_RG1095\": \"HLA-DRA1*01:01-DRB1*03:01 HLA-DRA1*01:01-DRB1*11:01 HLA-DRA1*01:01-DRB3*01:01 HLA-DRA1*01:01-DRB3*02:02\",\n        
\"HLA-DR_PBMC_RG1104\": \"DRB1*01:01 DRB1*11:01 DRB3*02:02\",\n        \"HLA-DR_PBMC_RG1248\": \"DRB1*03:01 DRB1*03:01 DRB3*01:01 DRB3*01:01\",\n        \"HLA-DR_SILAC_Donor1_10minLysate\": None,\n        \"HLA-DR_SILAC_Donor1_5hrLysate\": None,\n        \"HLA-DR_SILAC_Donor1_DConly\": None,\n        \"HLA-DR_SILAC_Donor1_UVovernight\": None,\n        \"HLA-DR_SILAC_Donor2_DC_UV_16hr\": None,\n        \"HLA-DR_SILAC_Donor2_DC_UV_24hr\": None,\n        \"HLA-DR_Spleen\": \"DRB1*04:01 DRB4*01:03 DRB1*15:03 DRB5*01:01\",\n        \"MAPTAC_A*02:01\": \"HLA-A*02:01\",\n        \"MAPTAC_A*11:01\": \"HLA-A*11:01\",\n        \"MAPTAC_A*32:01\": \"HLA-A*32:01\",\n        \"MAPTAC_B*07:02\": \"HLA-B*07:02\",\n        \"MAPTAC_B*45:01\": \"HLA-B*45:01\",\n        \"MAPTAC_B*52:01\": \"HLA-B*52:01\",\n        \"MAPTAC_C*03:03\": \"HLA-C*03:03\",\n        \"MAPTAC_C*06:02\": \"HLA-C*06:02\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm+\": \"HLA-DPB1*06:01-DPA1*01:03\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm-\": \"HLA-DPB1*06:01-DPA1*01:03\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm+\": \"HLA-DQB1*06:04-DQA1*01:02\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm-\": \"HLA-DQB1*06:04-DQA1*01:02\",\n        \"MAPTAC_DRB1*01:01\": \"HLA-DRA1*01:01-DRB1*01:01\",\n        \"MAPTAC_DRB1*03:01\": \"HLA-DRA1*01:01-DRB1*03:01\",\n        \"MAPTAC_DRB1*04:01\": \"HLA-DRA1*01:01-DRB1*04:01\",\n        \"MAPTAC_DRB1*07:01\": \"HLA-DRA1*01:01-DRB1*07:01\",\n        \"MAPTAC_DRB1*11:01\": \"HLA-DRA1*01:01-DRB1*11:01\",\n        \"MAPTAC_DRB1*12:01_dm+\": \"HLA-DRA1*01:01-DRB1*12:01\",\n        \"MAPTAC_DRB1*12:01_dm-\": \"HLA-DRA1*01:01-DRB1*12:01\",\n        \"MAPTAC_DRB1*15:01\": \"HLA-DRA1*01:01-DRB1*15:01\",\n        \"MAPTAC_DRB3*01:01_dm+\": \"HLA-DRA1*01:01-DRB3*01:01\",\n        \"MAPTAC_DRB3*01:01_dm-\": \"HLA-DRA1*01:01-DRB3*01:01\",\n    }\n    pulldown_antibody = {\n        \"HLA-DR_Lung\": \"L243 (HLA-DR)\",\n        \"HLA-DR_PBMC_HDSC\": \"tal1b5 (HLA-DR)\",\n        \"HLA-DR_PBMC_RG1095\": \"tal1b5 (HLA-DR)\",\n        \"HLA-DR_PBMC_RG1104\": \"tal1b5 (HLA-DR)\",\n        \"HLA-DR_PBMC_RG1248\": \"tal1b5 (HLA-DR)\",\n        \"HLA-DR_Spleen\": \"L243 (HLA-DR)\",\n        \"MAPTAC_A*02:01\": \"MAPTAC\",\n        \"MAPTAC_A*11:01\": \"MAPTAC\",\n        \"MAPTAC_A*32:01\": \"MAPTAC\",\n        \"MAPTAC_B*07:02\": \"MAPTAC\",\n        \"MAPTAC_B*45:01\": \"MAPTAC\",\n        \"MAPTAC_B*52:01\": \"MAPTAC\",\n        \"MAPTAC_C*03:03\": \"MAPTAC\",\n        \"MAPTAC_C*06:02\": \"MAPTAC\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm+\": \"MAPTAC\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm-\": \"MAPTAC\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm+\": \"MAPTAC\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm-\": \"MAPTAC\",\n        \"MAPTAC_DRB1*01:01\": \"MAPTAC\",\n        \"MAPTAC_DRB1*03:01\": \"MAPTAC\",\n        \"MAPTAC_DRB1*04:01\": \"MAPTAC\",\n        \"MAPTAC_DRB1*07:01\": \"MAPTAC\",\n        \"MAPTAC_DRB1*11:01\": \"MAPTAC\",\n        \"MAPTAC_DRB1*12:01_dm+\": \"MAPTAC\",\n        \"MAPTAC_DRB1*12:01_dm-\": \"MAPTAC\",\n        \"MAPTAC_DRB1*15:01\": \"MAPTAC\",\n        \"MAPTAC_DRB3*01:01_dm+\": \"MAPTAC\",\n        \"MAPTAC_DRB3*01:01_dm-\": \"MAPTAC\",\n    }\n    format = {\n        \"HLA-DR_Lung\": \"DR-specific\",\n        \"HLA-DR_PBMC_HDSC\": \"DR-specific\",\n        \"HLA-DR_PBMC_RG1095\": \"DR-specific\",\n        \"HLA-DR_PBMC_RG1104\": \"DR-specific\",\n        \"HLA-DR_PBMC_RG1248\": \"DR-specific\",\n        \"HLA-DR_Spleen\": \"DR-specific\",\n        \"MAPTAC_A*02:01\": 
\"monoallelic\",\n        \"MAPTAC_A*11:01\": \"monoallelic\",\n        \"MAPTAC_A*32:01\": \"monoallelic\",\n        \"MAPTAC_B*07:02\": \"monoallelic\",\n        \"MAPTAC_B*45:01\": \"monoallelic\",\n        \"MAPTAC_B*52:01\": \"monoallelic\",\n        \"MAPTAC_C*03:03\": \"monoallelic\",\n        \"MAPTAC_C*06:02\": \"monoallelic\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm+\": \"monoallelic\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm-\": \"monoallelic\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm+\": \"monoallelic\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm-\": \"monoallelic\",\n        \"MAPTAC_DRB1*01:01\": \"monoallelic\",\n        \"MAPTAC_DRB1*03:01\": \"monoallelic\",\n        \"MAPTAC_DRB1*04:01\": \"monoallelic\",\n        \"MAPTAC_DRB1*07:01\": \"monoallelic\",\n        \"MAPTAC_DRB1*11:01\": \"monoallelic\",\n        \"MAPTAC_DRB1*12:01_dm+\": \"monoallelic\",\n        \"MAPTAC_DRB1*12:01_dm-\": \"monoallelic\",\n        \"MAPTAC_DRB1*15:01\": \"monoallelic\",\n        \"MAPTAC_DRB3*01:01_dm+\": \"monoallelic\",\n        \"MAPTAC_DRB3*01:01_dm-\": \"monoallelic\",\n    }\n    mhc_class = {\n        \"HLA-DR_Lung\": \"II\",\n        \"HLA-DR_PBMC_HDSC\": \"II\",\n        \"HLA-DR_PBMC_RG1095\": \"II\",\n        \"HLA-DR_PBMC_RG1104\": \"II\",\n        \"HLA-DR_PBMC_RG1248\": \"II\",\n        \"HLA-DR_Spleen\": \"II\",\n        \"MAPTAC_A*02:01\": \"I\",\n        \"MAPTAC_A*11:01\": \"I\",\n        \"MAPTAC_A*32:01\": \"I\",\n        \"MAPTAC_B*07:02\": \"I\",\n        \"MAPTAC_B*45:01\": \"I\",\n        \"MAPTAC_B*52:01\": \"I\",\n        \"MAPTAC_C*03:03\": \"I\",\n        \"MAPTAC_C*06:02\": \"I\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm+\": \"II\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm-\": \"II\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm+\": \"II\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm-\": \"II\",\n        \"MAPTAC_DRB1*01:01\": \"II\",\n        \"MAPTAC_DRB1*03:01\": \"II\",\n        \"MAPTAC_DRB1*04:01\": \"II\",\n        \"MAPTAC_DRB1*07:01\": \"II\",\n        \"MAPTAC_DRB1*11:01\": \"II\",\n        \"MAPTAC_DRB1*12:01_dm+\": \"II\",\n        \"MAPTAC_DRB1*12:01_dm-\": \"II\",\n        \"MAPTAC_DRB1*15:01\": \"II\",\n        \"MAPTAC_DRB3*01:01_dm+\": \"II\",\n        \"MAPTAC_DRB3*01:01_dm-\": \"II\",\n    }\n    cell_line = {\n        \"HLA-DR_Lung\": \"\",\n        \"HLA-DR_PBMC_HDSC\": \"\",\n        \"HLA-DR_PBMC_RG1095\": \"\",\n        \"HLA-DR_PBMC_RG1104\": \"\",\n        \"HLA-DR_PBMC_RG1248\": \"\",\n        \"HLA-DR_Spleen\": \"\",\n        \"MAPTAC_A*02:01\": \"\",\n        \"MAPTAC_A*11:01\": \"\",\n        \"MAPTAC_A*32:01\": \"\",\n        \"MAPTAC_B*07:02\": \"\",\n        \"MAPTAC_B*45:01\": \"expi293\",\n        \"MAPTAC_B*52:01\": \"\",\n        \"MAPTAC_C*03:03\": \"expi293\",\n        \"MAPTAC_C*06:02\": \"\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm+\": \"expi293\",\n        \"MAPTAC_DPB1*06:01/DPA1*01:03_dm-\": \"expi293\",\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm+\": \"expi293\",  # don't actually see this in DataS1A!\n        \"MAPTAC_DQB1*06:04/DQA1*01:02_dm-\": \"expi293\",\n        \"MAPTAC_DRB1*01:01\": \"\",\n        \"MAPTAC_DRB1*03:01\": \"expi293\",\n        \"MAPTAC_DRB1*04:01\": \"expi293\",\n        \"MAPTAC_DRB1*07:01\": \"\",\n        \"MAPTAC_DRB1*11:01\": \"\",\n        \"MAPTAC_DRB1*12:01_dm+\": \"expi293\",\n        \"MAPTAC_DRB1*12:01_dm-\": \"expi293\",\n        \"MAPTAC_DRB1*15:01\": \"expi293\",\n        \"MAPTAC_DRB3*01:01_dm+\": \"expi293\",\n        \"MAPTAC_DRB3*01:01_dm-\": \"expi293\",\n    
}\n\n\n    df = pandas.read_excel(filename, sheet_name=\"DataS1B\")\n    results = []\n    for sample_id in df.columns:\n        if hla_type[sample_id] is None:\n            print(\"Intentionally skipping\", sample_id)\n            continue\n\n        result_df = pandas.DataFrame({\n            \"peptide\": df[sample_id].dropna().values,\n        })\n        result_df[\"sample_id\"] = sample_id\n        result_df[\"hla\"] = hla_type[sample_id]\n        result_df[\"pulldown_antibody\"] = pulldown_antibody[sample_id]\n        result_df[\"format\"] = format[sample_id]\n        result_df[\"mhc_class\"] = mhc_class[sample_id]\n        result_df[\"sample_type\"] = PMID_31495665_SAMPLE_TYPES[sample_id]\n        result_df[\"cell_line\"] = cell_line[sample_id]\n        results.append(result_df)\n    result_df = pandas.concat(results, ignore_index=True)\n\n    # Remove class II for now\n    result_df = result_df.loc[result_df.mhc_class == \"I\"]\n    return result_df\n\n\ndef handle_pmid_27869121(filename):\n    \"\"\"Bassani-Sternberg, ..., Krackhardt Nature Comm. 2016 [PMID 27869121]\"\"\"\n    # Although this dataset has class II data also, we are only extracting\n    # class I for now.\n    df = pandas.read_excel(filename, skiprows=1)\n\n    # Taking these from:\n    # Supplementary Table 2: Information of patients selected for neoepitope\n    # identification\n    # For the Mel5 sample, only two-digit alleles are shown (A*01, A*25,\n    # B*08, B*18) so we are skipping that sample for now.\n    hla_df = pandas.DataFrame([\n        (\"Mel-8\", \"HLA-A*01:01 HLA-A*03:01 HLA-B*07:02 HLA-B*08:01 HLA-C*07:01 HLA-C*07:02\"),\n        (\"Mel-12\", \"HLA-A*01:01 HLA-B*08:01 HLA-C*07:01\"),\n        (\"Mel-15\", \"HLA-A*03:01 HLA-A*68:01 HLA-B*27:05 HLA-B*35:03 HLA-C*02:02 HLA-C*04:01\"),\n        (\"Mel-16\", \"HLA-A*01:01 HLA-A*24:02 HLA-B*07:02 HLA-B*08:01 HLA-C*07:01 HLA-C*07:02\"),\n    ], columns=[\"sample_id\", \"hla\"]).set_index(\"sample_id\")\n\n    # We assert below that none of the class I hit peptides were found in any\n    # of the class II pull downs.\n    class_ii_cols = [\n        c for c in df.columns if c.endswith(\"HLA-II (arbitrary units)\")\n    ]\n    class_ii_hits = set(df.loc[\n        (df[class_ii_cols].fillna(0.0).sum(1) > 0)\n    ].Sequence.unique())\n\n    results = []\n    for (sample_id, hla) in hla_df.hla.items():\n        intensity_col = \"Intensity %s_HLA-I (arbitrary units)\" % sample_id\n        sub_df = df.loc[\n            (df[intensity_col].fillna(0.0) > 0)\n        ]\n        filtered_sub_df = sub_df.loc[\n            (~sub_df.Sequence.isin(class_ii_hits))\n        ]\n        peptides = filtered_sub_df.Sequence.unique()\n        assert not any(p in class_ii_hits for p in peptides)\n\n        result_df = pandas.DataFrame({\n            \"peptide\": peptides,\n        })\n        result_df[\"sample_id\"] = sample_id\n        result_df[\"hla\"] = hla_df.loc[sample_id, \"hla\"]\n        result_df[\"pulldown_antibody\"] = \"W6/32\"\n        result_df[\"format\"] = \"multiallelic\"\n        result_df[\"mhc_class\"] = \"I\"\n        result_df[\"sample_type\"] = \"melanoma_met\"\n        result_df[\"cell_line\"] = None\n        results.append(result_df)\n\n    result_df = pandas.concat(results, ignore_index=True)\n    return result_df\n\n\ndef handle_pmid_31154438(*filenames):\n    \"\"\"Shraibman, ..., Admon Mol Cell Proteomics 2019 [PMID 31154438]\"\"\"\n    # Note: this publication also includes analyses of the secreted HLA\n    # peptidedome (sHLA) but we are using 
only the data from membrane-bound\n    # HLA.\n    (xls, txt) = sorted(filenames, key=lambda s: not s.endswith(\".xlsx\"))\n\n    info = pandas.read_excel(xls, skiprows=1)\n    df = pandas.read_csv(txt, sep=\"\\t\", skiprows=1)\n\n    hla_df = info.loc[\n        ~info[\"mHLA tissue sample\"].isnull()\n    ].set_index(\"mHLA tissue sample\")[[\"HLA typing\"]]\n\n    def fix_hla(string):\n        result = []\n        alleles = string.split(\";\")\n        for a in alleles:\n            a = a.strip()\n            if \"/\" in a:\n                (a1, a2) = a.split(\"/\")\n                a2 = a1[:2] + a2\n                lst = [a1, a2]\n            else:\n                lst = [a]\n            for a in lst:\n                normalized = normalize_allele_name_or_return_unknown(a)\n                result.append(normalized)\n        return \" \".join(result)\n\n    hla_df[\"hla\"] = hla_df[\"HLA typing\"].map(fix_hla)\n\n    results = []\n    for (sample_id, hla) in hla_df.hla.items():\n        intensity_col = \"Intensity %s\" % sample_id\n        sub_df = df.loc[\n            (df[intensity_col].fillna(0.0) > 0)\n        ]\n        peptides = sub_df.Sequence.unique()\n\n        result_df = pandas.DataFrame({\n            \"peptide\": peptides,\n        })\n        result_df[\"sample_id\"] = sample_id\n        result_df[\"hla\"] = hla_df.loc[sample_id, \"hla\"]\n        result_df[\"pulldown_antibody\"] = \"W6/32\"\n        result_df[\"format\"] = \"multiallelic\"\n        result_df[\"mhc_class\"] = \"I\"\n        result_df[\"sample_type\"] = \"glioblastoma_tissue\"\n        result_df[\"cell_line\"] = None\n        results.append(result_df)\n\n    result_df = pandas.concat(results, ignore_index=True)\n    return result_df\n\n\ndef handle_pmid_31844290(*filenames):\n    \"\"\"Sarkizova, ..., Keskin Nature Biotechnology 2019 [PMID 31844290]\"\"\"\n    (mono_filename, multi_filename) = sorted(filenames)\n\n    # Monoallelic\n    mono = pandas.read_excel(mono_filename, sheet_name=None)\n    dfs = []\n    for (key, value) in mono.items():\n        if key == 'Sheet1':\n            continue\n        allele_before_normalization = key\n        if not allele_before_normalization.startswith(\"HLA-\"):\n            allele_before_normalization = \"HLA-\" + allele_before_normalization\n        allele = normalize_allele_name(allele_before_normalization)\n        assert allele != \"UNKNOWN\"\n        df = pandas.DataFrame({\"peptide\": value.sequence.values})\n        df[\"sample_id\"] = \"keskin_%s\" % key\n        df[\"hla\"] = allele\n        df[\"pulldown_antibody\"] = \"W6/32\"\n        df[\"format\"] = \"monoallelic\"\n        df[\"mhc_class\"] = \"I\"\n        df[\"sample_type\"] = \"B-CELL\"\n        df[\"cell_line\"] = \"b721\"\n        dfs.append(df)\n\n    # Multiallelic\n    multi = pandas.read_excel(multi_filename, sheet_name=None)\n    metadata = multi['Tissue Sample Characteristics']\n    allele_table = metadata.drop_duplicates(\n        \"Clinical ID\").set_index(\"Clinical ID\").loc[\n        :, [c for c in metadata if c.startswith(\"HLA-\")]\n    ]\n    allele_table = allele_table.loc[~allele_table.index.isnull()]\n    allele_table = allele_table.loc[allele_table[\"HLA-A\"] != 'n.d.']\n    allele_table = allele_table.applymap(\n        lambda s: s[1:] if s.startswith(\"-\") else s)\n    allele_table = allele_table.applymap(\n        lambda s: \"B5101\" if s == \"B51\" else s)\n    allele_table = allele_table.applymap(normalize_allele_name_or_return_unknown)\n\n    sample_info = 
metadata.drop_duplicates(\n        \"Clinical ID\").set_index(\"Clinical ID\")[['Cancer type', 'IP Ab']]\n    sample_info = sample_info.loc[~sample_info.index.isnull()].fillna(\n        method='ffill')\n    sample_info = sample_info.loc[sample_info.index.isin(allele_table.index)]\n    sample_info = sample_info.loc[allele_table.index]\n    sample_info[\"hla\"] = [\n        \" \".join(row).replace(\"HLA-A*31:0102\", \"HLA-A*31:01\")  # fix a typo\n        for _, row in allele_table.iterrows()\n    ]\n    sample_info[\"sample_type\"] = sample_info['Cancer type'].map({\n        'CLL': \"B-CELL\",\n        'GBM': \"GLIOBLASTOMA_TISSUE\",\n        'Melanoma': \"MELANOMA\",\n        \"Ovarian\": \"OVARY\",\n        'ccRCC': \"KIDNEY\",\n    })\n    assert not sample_info[\"sample_type\"].isnull().any()\n    assert not sample_info[\"hla\"].str.contains(\"UNKNOWN\").any()\n\n    for (key, value) in multi.items():\n        if key == 'Tissue Sample Characteristics':\n            continue\n        for (directory, sub_df) in value.groupby(\"directory\"):\n            if 'Pat7' in directory or 'Pat9' in directory:\n                print(\"Skipping due to no HLA typing\", directory)\n                continue\n            try:\n                (sample_id,) = sample_info.loc[\n                    sample_info.index.map(\n                        lambda idx: (\n                            idx in directory or\n                            idx.replace(\"-\", \"_\").replace(\"MEL_\", \"\") in directory or\n                            idx.replace(\" \", \"_\") in directory\n                        ))\n                ].index\n            except ValueError as e:\n                # No unique match between this directory and a sample: drop\n                # into a debugger for inspection, then re-raise rather than\n                # continuing with an undefined sample_id.\n                print(directory, e)\n                import ipdb ; ipdb.set_trace()\n                raise\n            info = sample_info.loc[sample_id]\n            df = pandas.DataFrame({\"peptide\": sub_df.sequence.values})\n            df[\"sample_id\"] = \"keskin_%s\" % sample_id.replace(\" \", \"_\")\n            df[\"hla\"] = info['hla']\n            df[\"pulldown_antibody\"] = info['IP Ab']\n            df[\"format\"] = \"multiallelic\"\n            df[\"mhc_class\"] = \"I\"\n            df[\"sample_type\"] = info['sample_type']\n            df[\"cell_line\"] = None\n            dfs.append(df)\n\n    result_df = pandas.concat(dfs, ignore_index=True)\n    result_df[\"peptide\"] = result_df.peptide.str.upper()\n    return result_df\n\n\nEXPRESSION_GROUPS_ROWS = []\n\n\ndef make_expression_groups(dataset_identifier, df, groups):\n    result_df = pandas.DataFrame(index=df.index)\n    for (label, columns) in groups.items():\n        for col in columns:\n            if col not in df.columns:\n                raise ValueError(\n                    \"Missing: %s. 
Available: %s\" % (col, df.columns.tolist()))\n        result_df[label] = df[columns].mean(1)\n        EXPRESSION_GROUPS_ROWS.append((dataset_identifier, label, columns))\n    return result_df\n\n\ndef handle_expression_GSE113126(*filenames):\n    \"\"\"\n    Barry, ..., Krummel Nature Medicine 2018 [PMID 29942093]\n\n    This is the melanoma met RNA-seq dataset.\n\n    \"\"\"\n\n    df = pandas.read_csv(filenames[0], sep=\"\\t\", index_col=0)\n    df = df[[]]  # no columns\n\n    for filename in filenames:\n        df[os.path.basename(filename)] = pandas.read_csv(\n            filename, sep=\"\\t\", index_col=0)[\"TPM\"]\n\n    assert len(df.columns) == len(filenames)\n\n    groups = {\n        \"sample_type:MELANOMA_MET\": df.columns.tolist(),\n    }\n    return [make_expression_groups(\"GSE113126\", df, groups)]\n\n\ndef handle_expression_expression_atlas_22460905(filename):\n    df = pandas.read_csv(filename, sep=\"\\t\", skiprows=4, index_col=0)\n    del df[\"Gene Name\"]\n    df.columns = df.columns.str.lower()\n    df = df.fillna(0.0)\n\n    def matches(*strings):\n        return [c for c in df.columns if all(s in c for s in strings)]\n\n    groups = {\n        \"sample_type:B-LCL\": (\n            matches(\"b-cell\", \"lymphoblast\") + matches(\"b acute lymphoblastic\")),\n        \"sample_type:B-CELL\": matches(\"b-cell\"),\n        \"sample_type:B721-LIKE\": matches(\"b-cell\"),\n        \"sample_type:MELANOMA_CELL_LINE\": matches(\"melanoma\"),\n        \"sample_type:MELANOMA\": matches(\"melanoma\"),\n        \"sample_type:A375-LIKE\": matches(\"melanoma\"),\n        \"sample_type:KG1-LIKE\": matches(\"myeloid leukemia\"),\n\n        # Using a fibrosarcoma cell line for our fibroblast sample.\n        \"sample_type:FIBROBLAST\": ['fibrosarcoma, ht-1080'],\n\n        # For GBM tissue we are just using a mixture of cell lines.\n        \"sample_type:GLIOBLASTOMA_TISSUE\": matches(\"glioblastoma\"),\n\n        \"cell_line:THP-1\": [\"childhood acute monocytic leukemia, thp-1\"],\n        \"cell_line:HL-60\": [\"adult acute myeloid leukemia, hl-60\"],\n        \"cell_line:U-87\": ['glioblastoma, u-87 mg'],\n        \"cell_line:LNT-229\": ['glioblastoma, ln-229'],\n        \"cell_line:T98G\": ['glioblastoma, t98g'],\n        \"cell_line:SK-MEL-5\": ['cutaneous melanoma, sk-mel-5'],\n        'cell_line:MEWO': ['melanoma, mewo'],\n        \"cell_line:HCC1937\": ['breast ductal adenocarcinoma, hcc1937'],\n        \"cell_line:HCT116\": ['colon carcinoma, hct 116'],\n        \"cell_line:HCC1143\": ['breast ductal adenocarcinoma, hcc1143'],\n    }\n    return [make_expression_groups(\"expression_atlas_22460905\", df, groups)]\n\n\ndef handle_expression_human_protein_atlas(*filenames):\n    (cell_line_filename,) = [f for f in filenames if \"celline\" in f]\n    (blood_filename,) = [f for f in filenames if \"blood\" in f]\n    (gtex_filename,) = [f for f in filenames if \"gtex\" in f]\n\n    cell_line_df = pandas.read_csv(cell_line_filename, sep=\"\\t\")\n    blood_df = pandas.read_csv(blood_filename, sep=\"\\t\", index_col=0)\n    gtex_df = pandas.read_csv(gtex_filename, sep=\"\\t\")\n\n    cell_line_df = cell_line_df.pivot(\n        index=\"Gene\", columns=\"Cell line\", values=\"TPM\")\n\n    gtex_df = gtex_df.pivot(\n        index=\"Gene\", columns=\"Tissue\", values=\"TPM\")\n\n    return [\n        make_expression_groups(\n            \"human_protein_atlas:%s\" % os.path.basename(blood_filename),\n            blood_df,\n            groups={\n                \"sample_type:PBMC\": [\n   
                 c for c in blood_df.columns if \"total PBMC\" in c\n                ],\n\n                # for samples labeled leukapheresis we also use PBMC\n                \"sample_type:LEUKAPHERESIS\": [\n                    c for c in blood_df.columns if \"total PBMC\" in c\n                ],\n\n                # for samples labeled TIL we are also using PBMC\n                \"sample_type:TIL\": [\n                    c for c in blood_df.columns if \"total PBMC\" in c\n                ],\n            }),\n        make_expression_groups(\n            \"human_protein_atlas:%s\" % os.path.basename(cell_line_filename),\n            cell_line_df,\n            groups={\n                \"cell_line:HELA\": ['HeLa'],\n                \"cell_line:K562\": [\"K-562\"],\n                \"cell_line:HEK293\": ['HEK 293'],\n                \"cell_line:RPMI8226\": ['RPMI-8226'],\n                \"cell_line:EXPI293\": ['HEK 293'],  # EXPI293 derived from HEK293\n            }),\n        make_expression_groups(\n            \"human_protein_atlas:%s\" % os.path.basename(gtex_filename),\n            gtex_df,\n            groups={\n                \"sample_type:LUNG\": [\"lung\"],\n                \"sample_type:SPLEEN\": [\"spleen\"],\n                \"sample_type:OVARY\": [\"ovary\"],\n                \"sample_type:KIDNEY\": [\"kidney\"],\n            }),\n    ]\n\n\ndef make_expression_mixtures(expression_df):\n    global CELL_LINE_MIXTURES\n    groups = {}\n    for mix in CELL_LINE_MIXTURES:\n        components = []\n        for item in mix.replace(\"mix:\", \"\").upper().split(\",\"):\n            if \"cell_line:%s\" % item in expression_df.columns:\n                components.append(\"cell_line:%s\" % item)\n            else:\n                print(\"No cell line, falling back on similar: \", item)\n                components.append(\"sample_type:%s-LIKE\" % item)\n        groups[\"sample_type:\" + mix.upper()] = components\n    missing = set()\n    for some in groups.values():\n        for item in some:\n            if item not in expression_df.columns:\n                missing.add(item)\n    if missing:\n        raise ValueError(\n            \"Missing [%d]: %s. 
Available: %s\" % (\n                len(missing), missing, expression_df.columns.tolist()))\n    return make_expression_groups(\"mixtures\", expression_df, groups)\n\n\n# Add all functions with names like handle_pmid_XXXX to PMID_HANDLERS dict.\nfor (key, value) in list(locals().items()):\n    if key.startswith(\"handle_pmid_\"):\n        PMID_HANDLERS[key.replace(\"handle_pmid_\", \"\")] = value\n    elif key.startswith(\"handle_expression_\"):\n        EXPRESSION_HANDLERS[key.replace(\"handle_expression_\", \"\")] = value\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    expression_dfs = []\n    for (i, item_tpl) in enumerate(args.expression_item):\n        (label, filenames) = (item_tpl[0], item_tpl[1:])\n        label = label.replace(\"-\", \"_\")\n        print(\n            \"Processing expression item %d of %d\" % (i + 1, len(args.expression_item)),\n            label,\n            *[os.path.abspath(f) for f in filenames])\n\n        expression_dfs_for_item = []\n        handler = None\n        if label in EXPRESSION_HANDLERS:\n            handler = EXPRESSION_HANDLERS[label]\n            expression_dfs_for_item = handler(*filenames)\n        elif args.debug:\n            debug(*filenames)\n        else:\n            raise NotImplementedError(label)\n\n        if expression_dfs_for_item:\n            print(\n                \"Processed expression data\",\n                label,\n                \"result dataframes\",\n                len(expression_dfs_for_item))\n            print(*[e.columns for e in expression_dfs_for_item])\n            expression_dfs.extend(expression_dfs_for_item)\n\n    expression_df = expression_dfs[0]\n    for other in expression_dfs[1:]:\n        expression_df = pandas.merge(\n            expression_df, other, how='outer', left_index=True, right_index=True)\n\n    print(\"Genes in each expression dataframe: \",\n        *[len(e) for e in expression_dfs])\n    print(\"Genes in merged expression dataframe\", len(expression_df))\n\n    if CELL_LINE_MIXTURES:\n        print(\"Generating cell line mixtures.\")\n        expression_mixture_df = make_expression_mixtures(expression_df)\n        expression_df = pandas.merge(\n            expression_df,\n            expression_mixture_df,\n            how='outer',\n            left_index=True,\n            right_index=True)\n\n    ms_dfs = []\n    for (i, item_tpl) in enumerate(args.ms_item):\n        (pmid, filenames) = (item_tpl[0], item_tpl[1:])\n        print(\n            \"Processing MS item %d of %d\" % (i + 1, len(args.ms_item)),\n            pmid,\n            *[os.path.abspath(f) for f in filenames])\n\n        ms_df = None\n        handler = None\n        if pmid in PMID_HANDLERS:\n            handler = PMID_HANDLERS[pmid]\n            ms_df = handler(*filenames)\n        elif args.debug:\n            debug(*filenames)\n        else:\n            raise NotImplementedError(pmid)\n\n        if ms_df is not None:\n            ms_df[\"pmid\"] = pmid\n            if \"original_pmid\" not in ms_df.columns:\n                ms_df[\"original_pmid\"] = pmid\n            if \"expression_dataset\" not in ms_df.columns:\n                ms_df[\"expression_dataset\"] = \"\"\n            ms_df = ms_df.applymap(str).applymap(str.upper)\n            ms_df[\"sample_id\"] = ms_df.sample_id.str.replace(\" \", \"\")\n            print(\"*** PMID %s: %d peptides ***\" % (pmid, len(ms_df)))\n            if handler is not None:\n                print(handler.__doc__)\n            print(\"Counts by sample 
id:\")\n            print(ms_df.groupby(\"sample_id\").peptide.nunique())\n            print(\"\")\n            print(\"Counts by sample type:\")\n            print(ms_df.groupby(\"sample_type\").peptide.nunique())\n            print(\"****************************\")\n\n            for value in ms_df.expression_dataset.unique():\n                if value and value not in expression_df.columns:\n                    raise ValueError(\"No such expression dataset\", value)\n\n            ms_dfs.append(ms_df)\n\n    ms_df = pandas.concat(ms_dfs, ignore_index=True, sort=False)\n    ms_df[\"cell_line\"] = ms_df[\"cell_line\"].fillna(\"\")\n    ms_df[\"hla\"] = ms_df[\"hla\"].str.strip().str.replace(r'\\s+', ' ')\n\n    sample_table = ms_df[\n        [\"sample_id\", \"pmid\", \"expression_dataset\", \"cell_line\", \"sample_type\"]\n    ].drop_duplicates().set_index(\"sample_id\")\n\n    sample_id_to_expression_dataset = sample_table.expression_dataset.to_dict()\n    for (sample_id, value) in sorted(sample_id_to_expression_dataset.items()):\n        if value:\n            print(\"Expression dataset for sample\", sample_id, \"already assigned\")\n            continue\n        cell_line_col = \"cell_line:\" + sample_table.loc[sample_id, \"cell_line\"]\n        sample_type_col = \"sample_type:\" + (\n            sample_table.loc[sample_id, \"sample_type\"])\n\n        expression_dataset = None\n        for col in [cell_line_col, sample_type_col]:\n            if col in expression_df.columns:\n                expression_dataset = col\n                break\n\n        if not expression_dataset:\n            print(\"*\" * 20)\n            print(\"No expression dataset for sample \", sample_id)\n            print(\"Sample info:\")\n            print(sample_table.loc[sample_id])\n            print(\"*\" * 20)\n\n        sample_id_to_expression_dataset[sample_id] = expression_dataset\n        print(\n            \"Sample\", sample_id, \"assigned exp. 
dataset\", expression_dataset)\n\n    print(\"Expression dataset usage:\")\n    print(pandas.Series(sample_id_to_expression_dataset).value_counts())\n\n    missing = [\n        key for (key, value) in\n        sample_id_to_expression_dataset.items()\n        if value is None\n    ]\n    if missing:\n        print(\"Missing expression data for samples\", *missing)\n        print(\n            \"Missing cell lines: \",\n            *sample_table.loc[missing, \"cell_line\"].dropna().drop_duplicates().tolist())\n        print(\"Missing sample types: \", *sample_table.loc[\n            missing, \"sample_type\"].dropna().drop_duplicates().tolist())\n        if args.debug:\n            import ipdb; ipdb.set_trace()\n        else:\n            raise ValueError(\"Missing expression data for samples: \", missing)\n\n    ms_df[\"expression_dataset\"] = ms_df.sample_id.map(\n        sample_id_to_expression_dataset)\n\n    cols = [\n        \"pmid\",\n        \"sample_id\",\n        \"peptide\",\n        \"format\",\n        \"mhc_class\",\n        \"hla\",\n        \"expression_dataset\",\n    ]\n    cols += [c for c in sorted(ms_df.columns) if c not in cols]\n    ms_df = ms_df[cols]\n\n    null_df = ms_df.loc[ms_df.isnull().any(1)]\n    if len(null_df) > 0:\n        print(\"Nulls:\")\n        print(null_df)\n    else:\n        print(\"No nulls.\")\n\n    # Each sample should be coming from only one experiment.\n    assert ms_df.groupby(\"sample_id\").pmid.nunique().max() == 1, (\n        ms_df.groupby(\"sample_id\").pmid.nunique().sort_values())\n\n    expression_df.to_csv(args.expression_out, index=True)\n    print(\"Wrote: %s\" % os.path.abspath(args.expression_out))\n\n    ms_df.to_csv(args.ms_out, index=False)\n    print(\"Wrote: %s\" % os.path.abspath(args.ms_out))\n\n    if args.expression_metadata_out is not None:\n        expression_metadata_df = pandas.DataFrame(\n            EXPRESSION_GROUPS_ROWS,\n            columns=[\"expression_dataset\", \"label\", \"samples\"])\n        expression_metadata_df[\"samples\"] = expression_metadata_df[\n            \"samples\"\n        ].map(json.dumps)\n        expression_metadata_df.to_csv(args.expression_metadata_out, index=False)\n        print(\"Wrote: %s\" % os.path.abspath(args.expression_metadata_out))\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "downloads-generation/data_curated/requirements.txt",
    "content": "xlrd>=1.1.0\n"
  },
  {
    "path": "downloads-generation/data_evaluation/GENERATE.sh",
    "content": "#!/bin/bash\n#\n#\n# Usage: GENERATE.sh <local|cluster> <fresh|continue-incomplete>\n#\n# cluster mode uses an HPC cluster (Mount Sinai chimera cluster, which uses lsf job\n# scheduler). This would need to be modified for other sites.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=data_evaluation\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    echo \"Fresh run\"\n    rm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n    mkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nelse\n    echo \"Continuing incomplete run\"\nfi\n\n# Send stdout and stderr to a logfile included with the archive.\nLOG=\"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.$(date +%s).txt\"\nexec >  >(tee -ia \"$LOG\")\nexec 2> >(tee -ia \"$LOG\" >&2)\n\n# Log some environment info\necho \"Invocation: $0 $@\"\ndate\npip freeze\ngit status\nmhcflurry-downloads info\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nexport OMP_NUM_THREADS=1\nexport PYTHONUNBUFFERED=1\nexport MHCFLURRY_DEFAULT_PREDICT_BATCH_SIZE=16384\n\n## GENERATE BENCHMARK: MONOALLELIC\n#for kind in train_excluded all\nfor kind in train_excluded\ndo\n    EXCLUDE_TRAIN_DATA=\"\"\n    if [ \"$kind\" == \"train_excluded\" ]\n    then\n        EXCLUDE_TRAIN_DATA=\"$(mhcflurry-downloads path models_class1_pan_variants)/models.no_additional_ms/train_data.csv.bz2\"\n    fi\n\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.monoallelic.$kind.csv.bz2\" ]\n    then\n        echo \"Reusing existing monoallelic benchmark: benchmark.monoallelic.$kind.csv.bz2\"\n    else\n        cp $SCRIPT_DIR/make_benchmark.py .\n        time python make_benchmark.py \\\n            --hits \"$(mhcflurry-downloads path models_class1_processing)/hits_with_tpm.csv.bz2\" \\\n            --proteome-peptides \"$(mhcflurry-downloads path models_class1_processing)/proteome_peptides.csv.bz2\" \\\n            --decoys-per-hit 110 \\\n            --exclude-train-data \"$EXCLUDE_TRAIN_DATA\" \\\n            --only-format MONOALLELIC \\\n            --out \"$(pwd)/benchmark.monoallelic.$kind.csv\"\n        bzip2 -f benchmark.monoallelic.$kind.csv\n    fi\ndone\n\n### GENERATE BENCHMARK: MULTIALLELIC\n#for kind in train_excluded all\nfor kind in train_excluded\ndo\n    EXCLUDE_TRAIN_DATA=\"\"\n    if [ \"$kind\" == \"train_excluded\" ]\n    then\n        EXCLUDE_TRAIN_DATA=\"$(mhcflurry-downloads path models_class1_pan)/models.combined/train_data.csv.bz2\"\n    fi\n\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.multiallelic.$kind.csv.bz2\" ]\n    then\n        echo \"Reusing existing multiallelic benchmark\"\n    else\n        cp $SCRIPT_DIR/make_benchmark.py .\n        time python make_benchmark.py \\\n            --hits \"$(mhcflurry-downloads path models_class1_processing)/hits_with_tpm.csv.bz2\" \\\n            --proteome-peptides \"$(mhcflurry-downloads path models_class1_processing)/proteome_peptides.csv.bz2\" \\\n            --decoys-per-hit 110 \\\n            --exclude-train-data \"$EXCLUDE_TRAIN_DATA\" \\\n            --only-format MULTIALLELIC \\\n            --out \"$(pwd)/benchmark.multiallelic.$kind.csv\"\n        bzip2 -f benchmark.multiallelic.$kind.csv\n    fi\ndone\n\nfor kind in train_excluded\ndo\n    ### SPLIT BENCHMARK: MONOALLELIC\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"MONOALLELIC_SAMPLES\" ]\n    then\n        echo 
\"Reusing existing monoallelic $kind benchmark pieces\"\n    else\n        cp $SCRIPT_DIR/split_by_sample.py .\n        time python split_by_sample.py \\\n            \"$(pwd)/benchmark.monoallelic.$kind.csv.bz2\" \\\n            --out \"$(pwd)/benchmark.monoallelic.$kind.%s.csv\" \\\n            --out-samples MONOALLELIC_SAMPLES\n\n        for sample in $(cat MONOALLELIC_SAMPLES)\n        do\n            bzip2 -f \"$(pwd)/benchmark.monoallelic.$kind.$sample.csv\"\n        done\n    fi\n\n    ### SPLIT BENCHMARK: MULTIALLELIC\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"MULTIALLELIC_SAMPLES\" ]\n    then\n        echo \"Reusing existing multiallelic $kind benchmark pieces\"\n    else\n        cp $SCRIPT_DIR/split_by_sample.py .\n        time python split_by_sample.py \\\n            \"$(pwd)/benchmark.multiallelic.$kind.csv.bz2\" \\\n            --out \"$(pwd)/benchmark.multiallelic.$kind.%s.csv\" \\\n            --out-samples MULTIALLELIC_SAMPLES\n\n        for sample in $(cat MULTIALLELIC_SAMPLES)\n        do\n            bzip2 -f \"$(pwd)/benchmark.multiallelic.$kind.$sample.csv\"\n        done\n    fi\ndone\n\nrm -rf commands\nmkdir commands\n\nrm -f CHECK_FILES\n\n#for kind in train_excluded all\nfor kind in train_excluded\ndo\n    MONOALLELIC_AFFINITY_PREDICTOR=\"$(mhcflurry-downloads path models_class1_pan_variants)/models.no_additional_ms\"\n    echo \"MONOALLELIC: Using affinity predictor: MONOALLELIC_AFFINITY_PREDICTOR\"\n    cat \"$MONOALLELIC_AFFINITY_PREDICTOR/info.txt\"\n\n    GROUP=group.monoallelic.no_additional_ms.$kind.csv\n    echo \"filename\" > $GROUP\n\n    for sample in $(cat MONOALLELIC_SAMPLES)\n    do\n        ## AFFINITY PREDICTOR VARIANT: MONOALLELIC\n        if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.monoallelic.no_additional_ms.$kind.$sample.csv.bz2\" ]\n        then\n            echo \"Reusing existing monoallelic benchmark predictions $sample\"\n        else\n            echo time mhcflurry-predict \\\n                \"$(pwd)/benchmark.monoallelic.$kind.$sample.csv.bz2\" \\\n                --allele-column hla \\\n                --prediction-column-prefix no_additional_ms_ \\\n                --models \\\"\"$MONOALLELIC_AFFINITY_PREDICTOR\"\\\" \\\n                --affinity-only \\\n                --no-affinity-percentile \\\n                --out \"$(pwd)/benchmark.monoallelic.no_additional_ms.$kind.$sample.csv\" \\\n                --no-throw >> commands/monoallelic.$kind.$sample.sh\n            echo bzip2 -f \"$(pwd)/benchmark.monoallelic.no_additional_ms.$kind.$sample.csv\" >> commands/monoallelic.$kind.$sample.sh\n        fi\n        echo ls -lh \"benchmark.monoallelic.no_additional_ms.$kind.$sample.csv.bz2\" >> CHECK_FILES\n        echo \"benchmark.monoallelic.no_additional_ms.$kind.$sample.csv.bz2\" >> $GROUP\n    done\n\n    MULTIALLELIC_AFFINITY_PREDICTOR=\"$(mhcflurry-downloads path models_class1_pan)/models.combined\"\n    echo \"MULTIALLELIC: Using affinity predictor: MULTIALLELIC_AFFINITY_PREDICTOR\"\n    cat \"$MULTIALLELIC_AFFINITY_PREDICTOR/info.txt\"\n\n    GROUP=group.multiallelic.production.$kind.csv\n    echo \"filename\" > $GROUP\n\n    for sample in $(cat MULTIALLELIC_SAMPLES)\n    do\n        ### AFFINITY PREDICTORS: MULTIALLELIC\n        if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.multiallelic.production.$kind.$sample.csv.bz2\" ]\n        then\n            echo \"Reusing existing multiallelic predictions $sample\"\n        else\n            echo time mhcflurry-predict \\\n              
  \"$(pwd)/benchmark.multiallelic.$kind.$sample.csv.bz2\" \\\n                --allele-column hla \\\n                --prediction-column-prefix mhcflurry_production_ \\\n                --models \\\"\"$MULTIALLELIC_AFFINITY_PREDICTOR\"\\\" \\\n                --affinity-only \\\n                --no-affinity-percentile \\\n                --out \"$(pwd)/benchmark.multiallelic.production.$kind.$sample.csv\" >> commands/multiallelic.production.$kind.$sample.sh\n            echo bzip2 -f \"$(pwd)/benchmark.multiallelic.production.$kind.$sample.csv\" >> commands/multiallelic.production.$kind.$sample.sh\n        fi\n        echo ls -lh \"benchmark.multiallelic.production.$kind.$sample.csv.bz2\" >> CHECK_FILES\n        echo \"benchmark.multiallelic.production.$kind.$sample.csv.bz2\" >> $GROUP\n    done\n\n    #for sample in $(cat MULTIALLELIC_SAMPLES)\n    #do\n        #for variant in no_additional_ms compact_peptide affinity_only no_pretrain single_hidden_no_pretrain 500nm\n        #for variant in 50nm\n        # TODO: IF this gets enabled, add the $GROUP stuff.\n        #do\n        #    if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.multiallelic.${variant}.$kind.$sample.csv.bz2\" ]\n        #    then\n        #        echo \"Reusing existing multiallelic predictions: ${variant} $sample\"\n        #    else\n        #        echo time mhcflurry-predict \\\n        #            \"$(pwd)/benchmark.multiallelic.$kind.$sample.csv.bz2\" \\\n        #            --allele-column hla \\\n        #            --prediction-column-prefix \"${variant}_\" \\\n        #            --models \\\"\"$(mhcflurry-downloads path models_class1_pan_variants)/models.$variant\"\\\" \\\n        #            --affinity-only \\\n        #            --no-affinity-percentile \\\n        #            --out \"$(pwd)/benchmark.multiallelic.${variant}.$kind.$sample.csv\" >> commands/multiallelic.${variant}.$kind.$sample.sh\n        #        echo bzip2 -f \"$(pwd)/benchmark.multiallelic.${variant}.$kind.$sample.csv\" >> commands/multiallelic.${variant}.$kind.$sample.sh\n        #    fi\n        #    echo ls -lh \"benchmark.multiallelic.${variant}.$kind.$sample.csv.bz2\" >> CHECK_FILES\n        #done\n    #done\n\n    PRESENTATION_PREDICTOR=\"$(mhcflurry-downloads path models_class1_presentation)/models\"\n    echo \"Using presentation predictor: $PRESENTATION_PREDICTOR\"\n    cat \"$PRESENTATION_PREDICTOR/info.txt\"\n\n    GROUP=group.multiallelic.presentation_with_flanks.$kind.csv\n    echo \"filename\" > $GROUP\n\n    for sample in $(cat MULTIALLELIC_SAMPLES)\n    do\n        ### PRESENTATION: WITH FLANKS\n        if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.multiallelic.presentation_with_flanks.$kind.$sample.csv.bz2\" ]\n        then\n            echo \"Reusing existing multiallelic presentation with flanks $sample\"\n        else\n            echo time mhcflurry-predict \\\n                \"$(pwd)/benchmark.multiallelic.$kind.$sample.csv.bz2\" \\\n                --allele-column hla \\\n                --prediction-column-prefix presentation_with_flanks_ \\\n                --models \\\"\"$PRESENTATION_PREDICTOR\"\\\" \\\n                --no-affinity-percentile \\\n                --out \"$(pwd)/benchmark.multiallelic.presentation_with_flanks.$kind.$sample.csv\" >> commands/multiallelic.presentation_with_flanks.$kind.$sample.sh\n            echo bzip2 -f \"$(pwd)/benchmark.multiallelic.presentation_with_flanks.$kind.$sample.csv\"  >> 
commands/multiallelic.presentation_with_flanks.$kind.$sample.sh\n        fi\n        echo ls -lh \"benchmark.multiallelic.presentation_with_flanks.$kind.$sample.csv.bz2\" >> CHECK_FILES\n        echo \"benchmark.multiallelic.presentation_with_flanks.$kind.$sample.csv.bz2\" >> $GROUP\n    done\n\n    GROUP=group.multiallelic.presentation_without_flanks.$kind.csv\n    echo \"filename\" > $GROUP\n\n    for sample in $(cat MULTIALLELIC_SAMPLES)\n    do\n        ### PRESENTATION: NO FLANKS\n        if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.multiallelic.presentation_without_flanks.$kind.$sample.csv.bz2\" ]\n        then\n            echo \"Reusing existing multiallelic presentation without flanks $sample\"\n        else\n            echo time mhcflurry-predict \\\n                \"$(pwd)/benchmark.multiallelic.$kind.$sample.csv.bz2\" \\\n                --allele-column hla \\\n                --prediction-column-prefix presentation_without_flanks_ \\\n                --models \\\"\"$PRESENTATION_PREDICTOR\"\\\" \\\n                --no-affinity-percentile \\\n                --no-flanking \\\n                --out \"$(pwd)/benchmark.multiallelic.presentation_without_flanks.$kind.$sample.csv\" >> commands/multiallelic.presentation_without_flanks.$kind.$sample.sh\n            echo bzip2 -f \"$(pwd)/benchmark.multiallelic.presentation_without_flanks.$kind.$sample.csv\"  >> commands/multiallelic.presentation_without_flanks.$kind.$sample.sh\n        fi\n        echo ls -lh \"benchmark.multiallelic.presentation_without_flanks.$kind.$sample.csv.bz2\" >> CHECK_FILES\n        echo \"benchmark.multiallelic.presentation_without_flanks.$kind.$sample.csv.bz2\" >> $GROUP\n    done\n\n    ### PRECOMPUTED ####\n    for variant in netmhcpan4.ba netmhcpan4.el mixmhcpred\n    do\n        GROUP=group.monoallelic.${variant}.$kind.csv\n        echo \"filename\" > $GROUP\n\n        for sample in $(cat MONOALLELIC_SAMPLES)\n        do\n            if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.monoallelic.${variant}.$kind.$sample.csv.bz2\" ]\n            then\n                echo \"Reusing existing monoallelic ${variant} $sample\"\n            else\n                cp $SCRIPT_DIR/join_with_precomputed.py .\n                echo time python join_with_precomputed.py \\\n                    \\\"\"$(pwd)/benchmark.monoallelic.$kind.$sample.csv.bz2\"\\\" \\\n                    ${variant} \\\n                    --out \"$(pwd)/benchmark.monoallelic.${variant}.$kind.$sample.csv\" >> commands/monoallelic.${variant}.$kind.$sample.sh\n                echo bzip2 -f \"$(pwd)/benchmark.monoallelic.${variant}.$kind.$sample.csv\"  >> commands/monoallelic.${variant}.$kind.$sample.sh\n            fi\n            echo ls -lh \"benchmark.monoallelic.${variant}.$kind.$sample.csv.bz2\" >> CHECK_FILES\n            echo \"benchmark.monoallelic.${variant}.$kind.$sample.csv.bz2\" >> $GROUP\n        done\n\n        GROUP=group.multiallelic.${variant}.$kind.csv\n        echo \"filename\" > $GROUP\n\n        for sample in $(cat MULTIALLELIC_SAMPLES)\n        do\n            if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"benchmark.multiallelic.${variant}.$kind.$sample.csv.bz2\" ]\n            then\n                echo \"Reusing existing multiallelic ${variant} $sample\"\n            else\n                cp $SCRIPT_DIR/join_with_precomputed.py .\n                echo time python join_with_precomputed.py \\\n                    \\\"\"$(pwd)/benchmark.multiallelic.$kind.$sample.csv.bz2\"\\\" \\\n              
      ${variant} \\\n                    --out \"$(pwd)/benchmark.multiallelic.${variant}.$kind.$sample.csv\" >> commands/multiallelic.${variant}.$kind.$sample.sh\n                echo bzip2 -f \"$(pwd)/benchmark.multiallelic.${variant}.$kind.$sample.csv\"  >> commands/multiallelic.${variant}.$kind.$sample.sh\n            fi\n            echo ls -lh \"benchmark.multiallelic.${variant}.$kind.$sample.csv.bz2\" >> CHECK_FILES\n            echo \"benchmark.multiallelic.${variant}.$kind.$sample.csv.bz2\" >> $GROUP\n        done\n    done\ndone\n\nls -lh commands\n\nif [ \"$1\" != \"cluster\" ]\nthen\n    echo \"Running locally\"\n    for i in $(ls commands/*.sh)\n    do\n        echo \"# *******\"\n        echo \"# Command $i\"\n        cat $i\n        bash $i\n    done\nelse\n    echo \"Running on cluster\"\n    for i in $(ls commands/*.sh)\n    do\n        echo \"# *******\"\n        echo \"# Command $i\"\n        cat $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.lsf > ${i}.lsf\n        echo cd \"$(pwd)\" >> ${i}.lsf\n        cat $i >> ${i}.lsf\n        cat ${i}.lsf\n        bsub -K < \"${i}.lsf\" &\n        sleep 1\n    done\n    wait\nfi\n\nbash CHECK_FILES\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 -f \"$LOG\"\nfor i in $(ls LOG-worker.*.txt) ; do bzip2 -f $i ; done\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n\n# Split into <2GB chunks for GitHub\nPARTS=\"${RESULT}.part.\"\n# Check for pre-existing part files and rename them.\nfor i in $(ls \"${PARTS}\"* )\ndo\n    DEST=\"${i}.OLD.$(date +%s)\"\n    echo \"WARNING: already exists: $i . Moving to $DEST\"\n    mv $i $DEST\ndone\nsplit -b 2000m \"$RESULT\" \"$PARTS\"\necho \"Split into parts:\"\nls -lh \"${PARTS}\"*"
  },
  {
    "path": "downloads-generation/data_evaluation/cluster_submit_script_header.mssm_hpc.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q premium # queue\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 40:00 # walltime in HH:MM\n#BSUB -R rusage[mem=45000] # mb memory requested\n#BSUB -o %J.stdout # output log (%J : JobID)\n#BSUB -eo %J.stderr # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/mhcflurry-conda-environment/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\n\nfree -m\nmodule list\nenv\n\n"
  },
  {
    "path": "downloads-generation/data_evaluation/join_with_precomputed.py",
    "content": "\"\"\"\nJoin benchmark with precomputed predictions.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport numpy\nimport collections\n\nimport pandas\nimport tqdm\n\nimport mhcflurry\nfrom mhcflurry.downloads import get_path\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"benchmark\")\nparser.add_argument(\n    \"predictors\",\n    nargs=\"+\",\n    choices=(\"netmhcpan4.ba\", \"netmhcpan4.el\", \"mixmhcpred\"))\nparser.add_argument(\n    \"--out\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"File to write\")\n\n\ndef load_results(dirname, result_df=None, columns=None):\n    peptides = pandas.read_csv(os.path.join(dirname, \"peptides.csv\")).peptide\n    manifest_df = pandas.read_csv(os.path.join(dirname, \"alleles.csv\"))\n\n    print(\"Loading results. Existing data has\", len(peptides), \"peptides and\",\n        len(manifest_df), \"columns\")\n\n    if columns is None:\n        columns = manifest_df.col.values\n\n    if result_df is None:\n        result_df = pandas.DataFrame(\n            index=peptides,\n            columns=columns,\n            dtype=\"float32\")\n        result_df[:] = numpy.nan\n        peptides_to_assign = peptides\n        mask = None\n    else:\n        mask = (peptides.isin(result_df.index)).values\n        peptides_to_assign = peptides[mask]\n\n    manifest_df = manifest_df.loc[manifest_df.col.isin(result_df.columns)]\n\n    print(\"Will load\", len(peptides), \"peptides and\", len(manifest_df), \"cols\")\n\n    for _, row in tqdm.tqdm(manifest_df.iterrows(), total=len(manifest_df)):\n        with open(os.path.join(dirname, row.path), \"rb\") as fd:\n            value = numpy.load(fd)['arr_0'].astype(numpy.float32)\n            if mask is not None:\n                value = value[mask]\n            result_df.loc[peptides_to_assign, row.col] = value\n\n    return result_df\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    df = pandas.read_csv(args.benchmark)\n\n    peptides = df.peptide.unique()\n    alleles = set()\n    for some in df.hla.unique():\n        alleles.update(some.split())\n\n    precomputed_dfs = {}\n\n    if 'netmhcpan4.ba' in args.predictors:\n        precomputed_dfs['netmhcpan4.ba'] = load_results(\n            get_path(\"data_predictions\", \"predictions/all.netmhcpan4.ba\"),\n            result_df=pandas.DataFrame(\n                dtype=numpy.float32,\n                index=peptides,\n                columns=[\"%s affinity\" % a for a in alleles])).rename(\n            columns=lambda s: s.replace(\"affinity\", \"\").strip())\n        precomputed_dfs['netmhcpan4.ba'] *= -1  # flip since it's affinities\n\n    if 'netmhcpan4.el' in args.predictors:\n        precomputed_dfs['netmhcpan4.el'] = load_results(\n            get_path(\"data_predictions\", \"predictions/all.netmhcpan4.el\"),\n            result_df=pandas.DataFrame(\n                dtype=numpy.float32,\n                index=peptides,\n                columns=[\"%s score\" % a for a in alleles])).rename(\n            columns=lambda s: s.replace(\"score\", \"\").strip())\n\n    if 'mixmhcpred' in args.predictors:\n        precomputed_dfs['mixmhcpred'] = load_results(\n            get_path(\"data_predictions\", \"predictions/all.mixmhcpred\"),\n            result_df=pandas.DataFrame(\n                dtype=numpy.float32,\n                index=peptides,\n                columns=[\"%s score\" % a for a in alleles])).rename(\n            columns=lambda s: s.replace(\"score\", \"\").strip())\n\n    
skip_experiments = set()\n\n    for hla_text, sub_df in tqdm.tqdm(df.groupby(\"hla\"), total=df.hla.nunique()):\n        hla = hla_text.split()\n        for (name, precomputed_df) in precomputed_dfs.items():\n            df.loc[sub_df.index, name] = numpy.nan\n            prediction_df = pandas.DataFrame(index=sub_df.peptide, dtype=float)\n            for allele in hla:\n                if allele not in precomputed_df.columns or precomputed_df[allele].isnull().all():\n                    print(sub_df.sample_id.unique(), hla)\n                    skip_experiments.update(sub_df.sample_id.unique())\n                prediction_df[allele] = precomputed_df.loc[\n                    prediction_df.index, allele\n                ]\n            df.loc[sub_df.index, name] = prediction_df.max(1, skipna=False).values\n            df.loc[sub_df.index, name + \"_best_allele\"] = prediction_df.idxmax(\n                1, skipna=False).values\n\n    if 'netmhcpan4.ba' in args.predictors:\n        # unflip the values\n        df['netmhcpan4.ba'] *= -1\n\n    print(\"Skip experiments\", skip_experiments)\n    print(\"results\")\n    print(df)\n\n    df.to_csv(args.out, index=False)\n    print(\"Wrote\", args.out)\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "downloads-generation/data_evaluation/make_benchmark.py",
    "content": "\"\"\"\nMake training data by selecting decoys, etc.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport numpy\nimport collections\n\nimport pandas\nimport tqdm\n\nimport mhcflurry\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--hits\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Multiallelic mass spec\")\nparser.add_argument(\n    \"--proteome-peptides\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Proteome peptides\")\nparser.add_argument(\n    \"--decoys-per-hit\",\n    type=float,\n    metavar=\"N\",\n    default=99,\n    help=\"Decoys per hit\")\nparser.add_argument(\n    \"--exclude-pmid\",\n    nargs=\"+\",\n    default=[],\n    help=\"Exclude given PMID\")\nparser.add_argument(\n    \"--only-pmid\",\n    nargs=\"*\",\n    default=[],\n    help=\"Include only the given PMID\")\nparser.add_argument(\n    \"--exclude-train-data\",\n    nargs=\"+\",\n    default=[],\n    help=\"Remove hits and decoys included in the given training data\")\nparser.add_argument(\n    \"--only-format\",\n    choices=(\"MONOALLELIC\", \"MULTIALLELIC\"),\n    help=\"Include only data of the given format\")\nparser.add_argument(\n    \"--sample-fraction\",\n    type=float,\n    help=\"Subsample data by specified fraction (e.g. 0.1)\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"File to write\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    hit_df = pandas.read_csv(args.hits)\n    hit_df[\"pmid\"] = hit_df[\"pmid\"].astype(str)\n    original_samples_pmids = hit_df.pmid.unique()\n    numpy.testing.assert_equal(hit_df.hit_id.nunique(), len(hit_df))\n    hit_df = hit_df.loc[\n        (hit_df.mhc_class == \"I\") &\n        (hit_df.peptide.str.len() <= 11) &\n        (hit_df.peptide.str.len() >= 8) &\n        (~hit_df.protein_ensembl.isnull()) &\n        (hit_df.peptide.str.match(\"^[%s]+$\" % \"\".join(\n            mhcflurry.amino_acid.COMMON_AMINO_ACIDS)))\n    ]\n    hit_df['alleles'] = hit_df.hla.str.split().map(tuple)\n    print(\"Loaded hits from %d samples\" % hit_df.sample_id.nunique())\n    if args.only_format:\n        hit_df = hit_df.loc[hit_df.format == args.only_format].copy()\n        print(\"Subselected to %d %s samples\" % (\n            hit_df.sample_id.nunique(), args.only_format))\n\n    if args.only_pmid or args.exclude_pmid:\n        assert not (args.only_pmid and args.exclude_pmid)\n\n        pmids = list(args.only_pmid) + list(args.exclude_pmid)\n        missing = [pmid for pmid in pmids if pmid not in original_samples_pmids]\n        assert not missing, (missing, original_samples_pmids)\n\n        mask = hit_df.pmid.isin(pmids)\n        if args.exclude_pmid:\n            mask = ~mask\n\n        new_hit_df = hit_df.loc[mask]\n        print(\n            \"Selecting by pmids\",\n            pmids,\n            \"reduced dataset from\",\n            len(hit_df),\n            \"to\",\n            len(new_hit_df))\n        hit_df = new_hit_df.copy()\n        print(\"Subselected by pmid to %d samples\" % hit_df.sample_id.nunique())\n\n    allele_to_excluded_peptides = collections.defaultdict(set)\n    for train_dataset in args.exclude_train_data:\n        if not train_dataset:\n            continue\n        print(\"Excluding hits from\", train_dataset)\n        train_df = pandas.read_csv(train_dataset)\n        for (allele, peptides) in train_df.groupby(\"allele\").peptide.unique().items():\n            
allele_to_excluded_peptides[allele].update(peptides)\n        train_counts = train_df.groupby(\n            [\"allele\", \"peptide\"]).measurement_value.count().to_dict()\n        hit_no_train = hit_df.loc[\n            [\n                not any([\n                    train_counts.get((allele, row.peptide))\n                    for allele in row.alleles\n                ])\n            for _, row in tqdm.tqdm(hit_df.iterrows(), total=len(hit_df))]\n        ]\n        print(\n            \"Excluding hits from\",\n            train_dataset,\n            \"reduced dataset from\",\n            len(hit_df),\n            \"to\",\n            len(hit_no_train))\n        hit_df = hit_no_train\n\n    sample_table = hit_df.drop_duplicates(\"sample_id\").set_index(\"sample_id\")\n    grouped = hit_df.groupby(\"sample_id\").nunique()\n    for col in sample_table.columns:\n        if (grouped[col] > 1).any():\n            del sample_table[col]\n\n    print(\"Loading proteome peptides\")\n    all_peptides_df = pandas.read_csv(args.proteome_peptides)\n    print(\"Loaded: \", all_peptides_df.shape)\n\n    all_peptides_df = all_peptides_df.loc[\n        all_peptides_df.protein_accession.isin(hit_df.protein_accession.unique()) &\n        all_peptides_df.peptide.str.match(\"^[%s]+$\" % \"\".join(\n            mhcflurry.amino_acid.COMMON_AMINO_ACIDS))\n    ].copy()\n    all_peptides_df[\"length\"] = all_peptides_df.peptide.str.len()\n    print(\"Subselected proteome peptides by accession: \", all_peptides_df.shape)\n\n    all_peptides_by_length = dict(iter(all_peptides_df.groupby(\"length\")))\n\n    print(\"Selecting decoys.\")\n\n    lengths = [8, 9, 10, 11]\n    result_df = []\n\n    for sample_id, sub_df in tqdm.tqdm(\n            hit_df.groupby(\"sample_id\"), total=hit_df.sample_id.nunique()):\n        result_df.append(\n            sub_df[[\n                \"protein_accession\",\n                \"peptide\",\n                \"sample_id\",\n                \"n_flank\",\n                \"c_flank\",\n            ]].copy())\n        result_df[-1][\"hit\"] = 1\n\n        excluded_peptides = set()\n        for allele in sample_table.loc[sample_id].alleles:\n            excluded_peptides.update(allele_to_excluded_peptides[allele])\n        print(\n            sample_id,\n            \"will exclude\",\n            len(excluded_peptides),\n            \"peptides from decoy universe\")\n\n        for length in lengths:\n            universe = all_peptides_by_length[length]\n            possible_universe = universe.loc[\n                (~universe.peptide.isin(sub_df.peptide.unique())) &\n                (~universe.peptide.isin(excluded_peptides)) &\n                (universe.protein_accession.isin(sub_df.protein_accession.unique()))\n            ]\n            selected_decoys = possible_universe.sample(\n                n=int(len(sub_df) * args.decoys_per_hit / len(lengths)))\n\n            result_df.append(selected_decoys[\n                [\"protein_accession\", \"peptide\", \"n_flank\", \"c_flank\"]\n            ].copy())\n            result_df[-1][\"hit\"] = 0\n            result_df[-1][\"sample_id\"] = sample_id\n\n    result_df = pandas.concat(result_df, ignore_index=True, sort=False)\n    result_df[\"hla\"] = result_df.sample_id.map(sample_table.hla)\n\n    print(result_df)\n    print(\"Counts:\")\n    print(result_df.groupby([\"sample_id\", \"hit\"]).peptide.nunique())\n\n    print(\"Hit counts:\")\n    print(\n        result_df.loc[\n            result_df.hit == 1\n        
].groupby(\"sample_id\").hit.count().sort_values())\n\n    print(\"Hit rates:\")\n    print(result_df.groupby(\"sample_id\").hit.mean().sort_values())\n\n    if args.sample_fraction:\n        print(\"Subsampling to \", args.sample_fraction)\n        result_df = result_df.sample(frac=args.sample_fraction)\n        print(\"Subsampled:\")\n        print(result_df)\n        print(\"Hit rates:\")\n        print(result_df.groupby(\"sample_id\").hit.mean().sort_values())\n\n    result_df.to_csv(args.out, index=False)\n    print(\"Wrote: \", args.out)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "downloads-generation/data_evaluation/split_by_sample.py",
    "content": "\"\"\"\nSplit a big csv by a particular column (sample id)\n\"\"\"\nimport sys\nimport argparse\nimport re\n\nimport pandas\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"data\",\n    metavar=\"CSV\")\nparser.add_argument(\n    \"--out\",\n    help=\"Out pattern (%s will be replaced by sample)\",\n    metavar=\"CSV\")\nparser.add_argument(\n    \"--out-samples\",\n    help=\"Out sample list\",\n    metavar=\"CSV\")\nparser.add_argument(\n    \"--split-column\",\n    help=\"Column to split by\",\n    default=\"sample_id\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    df = pandas.read_csv(args.data)\n    print(\"Read data with shape\", df.shape)\n\n    names = []\n    for (i, (sample, sub_df)) in enumerate(df.groupby(args.split_column)):\n        name = re.sub(r'[^\\w\\d-]', '', sample) + (\".%d\" % i)\n        dest = args.out % name\n        sub_df.to_csv(dest, index=False)\n        print(\"Wrote [%d rows]\" % len(sub_df), dest)\n        names.append(name)\n\n    if args.out_samples:\n        pandas.Series(names).to_csv(args.out_samples, index=False, header=False)\n        print(\"Wrote\", args.out_samples)\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "downloads-generation/data_iedb/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Download latest MHC I ligand data from IEDB.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=data_iedb\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nwget -q https://iedb.org/downloader.php?file_name=doc/mhc_ligand_full_single_file.zip -O mhc_ligand_full.zip\nwget -q http://www.iedb.org/downloader.php?file_name=doc/tcell_full_v3.zip -O tcell_full_v3.zip\n\nunzip mhc_ligand_full.zip\nrm mhc_ligand_full.zip\nbzip2 mhc_ligand_full.csv\n\nunzip tcell_full_v3.zip\nrm tcell_full_v3.zip\nbzip2 tcell_full_v3.csv\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n"
  },
  {
    "path": "downloads-generation/data_iedb/README.md",
    "content": "# IEDB Data\n\nThis download is a snapshot of the IEDB MHC ligand data, available at:\n\nhttp://www.iedb.org/doc/mhc_ligand_full.zip\n\nTo generate it, run:\n\n```\n./GENERATE.sh\n```"
  },
  {
    "path": "downloads-generation/data_mass_spec_annotated/GENERATE.sh",
    "content": "#!/bin/bash\n#\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=data_mass_spec_annotated\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\nexport PYTHONUNBUFFERED=1\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\ncp $SCRIPT_DIR/annotate.py .\n\nPEPTIDES=$(mhcflurry-downloads path data_curated)/ms.by_pmid.csv.bz2\nREFERENCES_DIR=$(mhcflurry-downloads path data_references)\n\npython annotate.py \\\n    \"$PEPTIDES\" \\\n    \"${REFERENCES_DIR}/uniprot_proteins.csv.bz2\" \\\n    \"${REFERENCES_DIR}/uniprot_proteins.fm\" \\\n    --out annotated_ms.csv\nbzip2 annotated_ms.csv\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n"
  },
  {
    "path": "downloads-generation/data_mass_spec_annotated/annotate.py",
    "content": "\"\"\"\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport time\nimport collections\nimport re\nfrom six.moves import StringIO\n\nimport pandas\nimport tqdm  # progress bar\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\nimport shellinford\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"peptides\",\n    metavar=\"FILE.csv\",\n    help=\"CSV of mass spec hits\")\nparser.add_argument(\n    \"reference_csv\",\n    metavar=\"FILE.csv\",\n    help=\"CSV of protein sequences\")\nparser.add_argument(\n    \"reference_index\",\n    metavar=\"FILE.fm\",\n    help=\"shellinford index over protein sequences\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"OUT.csv\",\n    help=\"Out file path\")\nparser.add_argument(\n    \"--flanking-length\",\n    metavar=\"N\",\n    type=int,\n    default=15,\n    help=\"Length of flanking sequence to include\")\nparser.add_argument(\n    \"--debug-max-rows\",\n    metavar=\"N\",\n    type=int,\n    default=None,\n    help=\"Max rows to process. Useful for debugging. If specified an ipdb \"\n    \"debugging session is also opened at the end of the script\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    df = pandas.read_csv(args.peptides)\n    df[\"hit_id\"] = \"hit.\" + df.index.map('{0:07d}'.format)\n    df = df.set_index(\"hit_id\")\n    print(\"Read peptides\", df.shape, *df.columns.tolist())\n\n    reference_df = pandas.read_csv(args.reference_csv, index_col=0)\n    reference_df = reference_df.set_index(\"accession\")\n    print(\"Read proteins\", reference_df.shape, *reference_df.columns.tolist())\n\n    fm = shellinford.FMIndex()\n    fm.read(args.reference_index)\n    print(\"Read proteins index\")\n\n    join_df = []\n    for (hit_id, row) in tqdm.tqdm(df.iterrows(), total=len(df)):\n        matches = fm.search(row.peptide)\n        for match in matches:\n            reference_row = reference_df.iloc[match.doc_id]\n            starts = [\n                m.start() for m in\n                re.finditer(row.peptide, reference_row.seq)\n            ]\n            assert len(starts) > 0, (row.peptide, reference_row.seq)\n            for start in starts:\n                end_pos = start + len(row.peptide)\n                n_flank = reference_row.seq[\n                    max(start - args.flanking_length, 0) : start\n                ].rjust(args.flanking_length, 'X')\n                c_flank = reference_row.seq[\n                    end_pos : (end_pos + args.flanking_length)\n                ].ljust(args.flanking_length, 'X')\n                join_df.append((\n                    hit_id,\n                    match.doc_id,\n                    len(matches),\n                    len(starts),\n                    start,\n                    start / len(reference_row.seq),\n                    n_flank,\n                    c_flank,\n                ))\n\n        if args.debug_max_rows and len(join_df) > args.debug_max_rows:\n            break\n\n    join_df = pandas.DataFrame(\n        join_df,\n        columns=[\n            \"hit_id\",\n            \"match_index\",\n            \"num_proteins\",\n            \"num_occurrences_in_protein\",\n            \"start_position\",\n            \"start_fraction_in_protein\",\n            \"n_flank\",\n            \"c_flank\",\n        ]).drop_duplicates()\n\n    join_df[\"protein_accession\"] = join_df.match_index.map(\n        reference_df.index.to_series().reset_index(drop=True))\n\n    del 
join_df[\"match_index\"]\n\n    protein_cols = [\n        c for c in reference_df.columns\n        if c not in [\"name\", \"description\", \"seq\"]\n    ]\n    for col in protein_cols:\n        join_df[\"protein_%s\" % col] = join_df.protein_accession.map(\n            reference_df[col])\n\n    merged_df = pandas.merge(\n        join_df,\n        df,\n        how=\"left\",\n        left_on=\"hit_id\",\n        right_index=True)\n\n    merged_df.to_csv(args.out, index=False)\n    print(\"Wrote: %s\" % os.path.abspath(args.out))\n\n    if args.debug_max_rows:\n        # Leave user in a debugger\n        import ipdb\n        ipdb.set_trace()\n\n\nif __name__ == '__main__':\n    run()\n"
  },
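The annotate.py script above pads flanks with 'X' so that every hit carries fixed-width flanking sequence even at protein boundaries. A minimal sketch of that convention, for reference when consuming the output (the `extract_flanks` helper and the toy sequence are illustrative, not part of the script):

```
FLANK_LEN = 15  # matches annotate.py's --flanking-length default

def extract_flanks(seq, start, peptide_len, flank_len=FLANK_LEN):
    # Clip at the protein boundaries, then pad with 'X' to fixed width.
    end = start + peptide_len
    n_flank = seq[max(start - flank_len, 0):start].rjust(flank_len, "X")
    c_flank = seq[end:end + flank_len].ljust(flank_len, "X")
    return n_flank, c_flank

# A peptide at position 0 gets an all-'X' N-flank:
print(extract_flanks("MSTNPKPQRKTKRNTNRRPQDVKFPGG", 0, 9))
```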
  {
    "path": "downloads-generation/data_mass_spec_annotated/requirements.txt",
    "content": "shellinford\n\n"
  },
  {
    "path": "downloads-generation/data_predictions/GENERATE.WITH_HPC_CLUSTER.sh",
    "content": "bash GENERATE.sh cluster reuse-predictions\n"
  },
  {
    "path": "downloads-generation/data_predictions/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# This download includes predictions for NetMHCpan 4.0 and MixMHCpred over a\n# large number of peptides encompassing almost the full proteome.\n#\n# Usage:\n# GENERATE.sh <local|cluster> <reuse-all|reuse-none|reuse-predictions|reuse-predictions-except-mhcflurry>\n#\n# The first choice listed above for each argument is the default.\n#\n# Meanings for these arguments:\n#\n# FIRST ARGUMENT: where to run\n# local             - run locally using NUM_JOBS cores.\n# cluster           - run on cluster.\n#\n# SECOND ARGUMENT: whether to reuse predictions from existing downloaded data\n# reuse-all         - reuse predictions and peptide / allele lists from existing\n#                     downloaded data_predictions.\n# reuse-none        - fully self-contained run; do not reuse anything.\n# reuse-predictions - reuse predictions but not peptide or allele lists. Any\n#                     new peptides not already included will be run.\n# reuse-predictions-except-mhcflurry\n#                   - Reuse predictions except for mhcflurry [1].\n# \n# [1] In an earlier version, this download also included predictions for MHCflurry. This is\n# no longer the case.\nset -e\nset -x\n\nDOWNLOAD_NAME=data_predictions\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\nexport PYTHONUNBUFFERED=1\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\ncp $SCRIPT_DIR/write_proteome_peptides.py .\ncp $SCRIPT_DIR/write_allele_list.py .\ncp $SCRIPT_DIR/run_predictors.py .\n\nif [ \"$1\" != \"cluster\" ]\nthen\n\n    GPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\n    echo \"Detected GPUS: $GPUS\"\n\n    PROCESSORS=$(getconf _NPROCESSORS_ONLN)\n    echo \"Detected processors: $PROCESSORS\"\n\n    if [ \"$GPUS\" -eq \"0\" ]; then\n       NUM_JOBS=${NUM_JOBS-1}\n    else\n        NUM_JOBS=${NUM_JOBS-$GPUS}\n    fi\n    echo \"Num jobs: $NUM_JOBS\"\n    EXTRA_ARGS+=\" --num-jobs $NUM_JOBS --max-tasks-per-worker 1 --gpus $GPUS --max-workers-per-gpu 1\"\nelse\n    EXTRA_ARGS+=\" --cluster-parallelism --cluster-max-retries 3 --cluster-submit-command bsub --cluster-results-workdir $HOME/mhcflurry-scratch\"\nfi\n\nPEPTIDES=$(mhcflurry-downloads path data_mass_spec_annotated)/annotated_ms.csv.bz2\nREFERENCES_DIR=$(mhcflurry-downloads path data_references)\n\nif [ \"${2:-reuse-none}\" != \"reuse-none\" ]\nthen\n    EXISTING_DATA=\"$(mhcflurry-downloads path $DOWNLOAD_NAME)\"\n    echo \"Will reuse data from $EXISTING_DATA\"\nelse\n    EXISTING_DATA=\"\"\n    echo \"Will NOT reuse any data\"\nfi\n\nmkdir predictions\n\n# Write out alleles\nif [ \"$2\" == \"reuse-all\" ]\nthen\n    echo \"Reusing allele list\"\n    cp \"$EXISTING_DATA/alleles.txt\" .\nelse\n    echo \"Generating allele list\"\n    python write_allele_list.py \"$PEPTIDES\" --out alleles.txt\nfi\n\n# Write out and process peptides.\nfor subset in all\ndo\n    if [ \"$2\" == \"reuse-all\" ]\n    then\n        echo \"Reusing peptide list\"\n        cp \"$EXISTING_DATA/proteome_peptides.$subset.csv.bz2\" .\n    else\n        echo 
\"Generating peptide list\"\n        SUBSET_ARG=\"\"\n        if [ \"$subset\" == \"chr1\" ]\n        then\n            SUBSET_ARG=\"--chromosome 1\"\n        fi\n        python write_proteome_peptides.py \\\n            \"$PEPTIDES\" \\\n            \"${REFERENCES_DIR}/uniprot_proteins.csv.bz2\" \\\n            --out proteome_peptides.$subset.csv $SUBSET_ARG\n        bzip2 proteome_peptides.$subset.csv\n    fi\n\n    # Run mixmhcpred\n    OUT_DIR=predictions/${subset}.mixmhcpred\n    REUSE=\"\"\n    if [ \"${2:-reuse-none}\" != \"reuse-none\" ]\n    then\n        REUSE=\"$EXISTING_DATA\"/$OUT_DIR\n    fi\n\n    python run_predictors.py \\\n        \"$(pwd)/proteome_peptides.$subset.csv.bz2\" \\\n        --result-dtype \"float16\" \\\n        --predictor mixmhcpred \\\n        --chunk-size 10000 \\\n        --allele $(cat alleles.txt) \\\n        --out \"$OUT_DIR\" \\\n        --worker-log-dir \"$SCRATCH_DIR/$DOWNLOAD_NAME\" \\\n        --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.nogpu.lsf \\\n        --reuse-predictions \"$REUSE\" $EXTRA_ARGS\n\n    # Run netmhcpan4\n    for kind in el ba\n    do\n        OUT_DIR=predictions/${subset}.netmhcpan4.$kind\n        REUSE=\"\"\n        if [ \"${2:-reuse-none}\" != \"reuse-none\" ]\n        then\n            REUSE=\"$EXISTING_DATA\"/$OUT_DIR\n        fi\n\n        python run_predictors.py \\\n            \"$(pwd)/proteome_peptides.$subset.csv.bz2\" \\\n            --result-dtype \"float16\" \\\n            --predictor netmhcpan4-$kind \\\n            --chunk-size 1000 \\\n            --allele $(cat alleles.txt) \\\n            --out \"$OUT_DIR\" \\\n            --worker-log-dir \"$SCRATCH_DIR/$DOWNLOAD_NAME\" \\\n            --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.nogpu.lsf \\\n            --reuse-predictions \"$REUSE\" $EXTRA_ARGS\n    done\n\n\n    # Run MHCflurry\n    #for kind in combined\n    #do\n    #    OUT_DIR=predictions/${subset}.mhcflurry.${kind}\n    #    REUSE=\"\"\n    #    if [ \"${2:-reuse-none}\" != \"reuse-none\" ] && [ \"${2:-reuse-none}\" != \"reuse-predictions-except-mhcflurry\" ]\n    #    then\n    #        REUSE=\"$EXISTING_DATA\"/$OUT_DIR\n    #    fi\n    #\n    #    python run_predictors.py \\\n    #        \"$(pwd)/proteome_peptides.$subset.csv.bz2\" \\\n    #        --result-dtype \"float16\" \\\n    #        --predictor mhcflurry \\\n    #        --chunk-size 500000 \\\n    #        --mhcflurry-batch-size 65536 \\\n    #        --mhcflurry-models-dir \"$(mhcflurry-downloads path models_class1_pan)/models.$kind\" \\\n    #        --allele $(cat alleles.txt) \\\n    #        --out \"$OUT_DIR\" \\\n    #        --worker-log-dir \"$SCRATCH_DIR/$DOWNLOAD_NAME\" \\\n    #        --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.gpu.lsf \\\n    #        --reuse-predictions \"$REUSE\" $EXTRA_ARGS\n    #done\ndone\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n\n# Split into <2GB chunks for GitHub\nPARTS=\"${RESULT}.part.\"\n# Check for pre-existing part files and rename them.\nfor i in $(ls \"${PARTS}\"* )\ndo\n    DEST=\"${i}.OLD.$(date +%s)\"\n    echo \"WARNING: already exists: $i . Moving to $DEST\"\n    mv $i $DEST\ndone\nsplit -b 2000m \"$RESULT\" \"$PARTS\"\necho \"Split into parts:\"\nls -lh \"${PARTS}\"*\n"
  },
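GENERATE.sh ends by splitting the archive into parts under 2GB for GitHub. The parts reassemble with `cat "${PARTS}"* > archive.tar.bz2`; here is an equivalent sketch in Python, assuming a fresh run with no leftover `.OLD.` part files (the `reassemble` helper is illustrative):

```
import glob
import shutil

def reassemble(result_path):
    # Concatenate RESULT.part.aa, RESULT.part.ab, ... back into RESULT.
    parts = sorted(
        p for p in glob.glob(result_path + ".part.*") if ".OLD." not in p)
    with open(result_path, "wb") as out:
        for part in parts:
            with open(part, "rb") as fd:
                shutil.copyfileobj(fd, out)
```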
  {
    "path": "downloads-generation/data_predictions/cluster_submit_script_header.mssm_hpc.gpu.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf-{work_item_num} # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q gpu # queue\n#BSUB -R rusage[ngpus_excl_p=1]  # 1 exclusive GPU\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 46:00 # walltime in HH:MM\n#BSUB -R rusage[mem=30000] # mb memory requested\n#BSUB -o {work_dir}/%J.stdout # output log (%J : JobID)\n#BSUB -eo {work_dir}/STDERR # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/.conda/envs/py36b/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\nexport NETMHC_BUNDLE_HOME=$HOME/sinai/git/netmhc-bundle\nexport NETMHC_BUNDLE_TMPDIR=/local/JOBS/netmhctmp-{work_item_num}\nexport PATH=$NETMHC_BUNDLE_HOME/bin:$PATH\n\nfree -m\n\nmodule add cuda/10.0.130 cudnn/7.1.1\nmodule list\n\n# python -c 'import tensorflow as tf ; print(\"GPU AVAILABLE\" if tf.test.is_gpu_available() else \"GPU NOT AVAILABLE\")'\n\nenv\n\ncd {work_dir}\n\n"
  },
  {
    "path": "downloads-generation/data_predictions/cluster_submit_script_header.mssm_hpc.nogpu.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf-{work_item_num} # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q express # queue\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 11:00 # walltime in HH:MM\n#BSUB -R rusage[mem=10000] # mb memory requested\n#BSUB -o {work_dir}/%J.stdout # output log (%J : JobID)\n#BSUB -eo {work_dir}/STDERR # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/.conda/envs/py36b/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\nexport NETMHC_BUNDLE_HOME=$HOME/sinai/git/netmhc-bundle\nexport NETMHC_BUNDLE_TMPDIR=/local/JOBS/netmhctmp-{work_item_num}\nexport PATH=$NETMHC_BUNDLE_HOME/bin:$PATH\n\ncd {work_dir}\n\nmkdir -p \"$NETMHC_BUNDLE_TMPDIR\"\nmkdir -p \"$TMPDIR\"\n\nchmod 777 \"$NETMHC_BUNDLE_TMPDIR\"\nchmod 777 \"$TMPDIR\"\n\nls \"$NETMHC_BUNDLE_TMPDIR\"\nls \"$TMPDIR\"\n\nnetMHCpan-4.0 -listMHC\nMixMHCpred -h\nfree -m\nenv\n\n\n"
  },
  {
    "path": "downloads-generation/data_predictions/requirements.txt",
    "content": "mhctools\n"
  },
  {
    "path": "downloads-generation/data_predictions/run_predictors.py",
    "content": "\"\"\"\n\"\"\"\nimport argparse\nimport os\nimport signal\nimport sys\nimport time\nimport traceback\nimport math\nimport collections\nfrom functools import partial\n\nimport numpy\nimport pandas\n\nfrom mhcflurry.common import normalize_allele_name\nimport tqdm  # progress bar\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\nfrom mhcflurry.common import configure_logging\nfrom mhcflurry.local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom mhcflurry.cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"input_peptides\",\n    metavar=\"CSV\",\n    help=\"CSV file with 'peptide' column\")\nparser.add_argument(\n    \"--predictor\",\n    required=True,\n    choices=(\"mhcflurry\", \"netmhcpan4-ba\", \"netmhcpan4-el\", \"mixmhcpred\"))\nparser.add_argument(\n    \"--mhcflurry-models-dir\",\n    metavar=\"DIR\",\n    help=\"Directory to read MHCflurry models\")\nparser.add_argument(\n    \"--mhcflurry-batch-size\",\n    type=int,\n    default=4096,\n    help=\"Batch size for MHCflurry predictions. Default: %(default)s\")\nparser.add_argument(\n    \"--allele\",\n    default=None,\n    required=True,\n    nargs=\"+\",\n    help=\"Alleles to predict\")\nparser.add_argument(\n    \"--chunk-size\",\n    type=int,\n    default=100000,\n    help=\"Num peptides per job. Default: %(default)s\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"DIR\",\n    help=\"Write results to DIR\")\nparser.add_argument(\n    \"--max-peptides\",\n    type=int,\n    help=\"Max peptides to process. For debugging.\",\n    default=None)\nparser.add_argument(\n    \"--reuse-predictions\",\n    metavar=\"DIR\",\n    nargs=\"*\",\n    help=\"Take predictions from indicated DIR instead of re-running them\")\nparser.add_argument(\n    \"--result-dtype\",\n    default=\"float32\",\n    help=\"Numpy dtype of result. Default: %(default)s.\")\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\nPREDICTOR_TO_COLS = {\n    \"mhcflurry\": [\"affinity\"],\n    \"netmhcpan4-ba\": [\"affinity\", \"percentile_rank\"],\n    \"netmhcpan4-el\": [\"score\"],\n    \"mixmhcpred\": [\"score\"],\n}\n\n\ndef load_results(dirname, result_df=None, dtype=\"float32\"):\n    peptides = pandas.read_csv(\n        os.path.join(dirname, \"peptides.csv\")).peptide\n    manifest_df = pandas.read_csv(os.path.join(dirname, \"alleles.csv\"))\n\n    print(\n        \"Loading results. Existing data has\",\n        len(peptides),\n        \"peptides and\",\n        len(manifest_df),\n        \"columns\")\n\n    # Make adjustments for old style data. 
Can be removed later.\n    if \"kind\" not in manifest_df.columns:\n        manifest_df[\"kind\"] = \"affinity\"\n    if \"col\" not in manifest_df.columns:\n        manifest_df[\"col\"] = manifest_df.allele + \" \" + manifest_df.kind\n\n    if result_df is None:\n        result_df = pandas.DataFrame(\n            index=peptides,\n            columns=manifest_df.col.values,\n            dtype=dtype)\n        result_df[:] = numpy.nan\n        peptides_to_assign = peptides\n        mask = None\n    else:\n        manifest_df = manifest_df.loc[manifest_df.col.isin(result_df.columns)]\n        mask = (peptides.isin(result_df.index)).values\n        peptides_to_assign = peptides[mask]\n\n    print(\"Will load\", len(peptides), \"peptides and\", len(manifest_df), \"cols\")\n\n    for _, row in tqdm.tqdm(manifest_df.iterrows(), total=len(manifest_df)):\n        with open(os.path.join(dirname, row.path), \"rb\") as fd:\n            value = numpy.load(fd)['arr_0']\n            if mask is not None:\n                value = value[mask]\n            result_df.loc[peptides_to_assign, row.col] = value\n\n    return result_df\n\n\ndef run(argv=sys.argv[1:]):\n    global GLOBAL_DATA\n\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    configure_logging()\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n\n    alleles = [\n        normalize_allele_name(a, raise_on_error=False) for a in args.allele\n    ]\n    n_bad_alleles = sum([a is None for a in alleles])\n    if n_bad_alleles > 0:\n        print(\"Dropping %d bad alleles\" % n_bad_alleles)\n\n    alleles = numpy.array(sorted({a for a in alleles if a}))\n\n    peptides = pandas.read_csv(\n        args.input_peptides, nrows=args.max_peptides).peptide.drop_duplicates()\n    print(\"Filtering to valid peptides. 
Starting at: \", len(peptides))\n    peptides = peptides[peptides.str.match(\"^[ACDEFGHIKLMNPQRSTVWY]+$\")]\n    print(\"Filtered to: \", len(peptides))\n    peptides = peptides.unique()\n    num_peptides = len(peptides)\n\n    print(\"Predictions for %d alleles x %d peptides.\" % (\n        len(alleles), num_peptides))\n\n    if not os.path.exists(args.out):\n        print(\"Creating\", args.out)\n        os.mkdir(args.out)\n\n    GLOBAL_DATA[\"predictor\"] = args.predictor\n    GLOBAL_DATA[\"args\"] = args\n    GLOBAL_DATA[\"cols\"] = PREDICTOR_TO_COLS[args.predictor]\n\n    # Write peptide and allele lists to out dir.\n    out_peptides = os.path.abspath(os.path.join(args.out, \"peptides.csv\"))\n    pandas.DataFrame({\"peptide\": peptides}).to_csv(out_peptides, index=False)\n    print(\"Wrote: \", out_peptides)\n\n    manifest_df = []\n    for allele in alleles:\n        for col in PREDICTOR_TO_COLS[args.predictor]:\n            manifest_df.append((allele, col))\n    manifest_df = pandas.DataFrame(\n        manifest_df, columns=[\"allele\", \"kind\"])\n    manifest_df[\"col\"] = (\n            manifest_df.allele + \" \" + manifest_df.kind)\n    manifest_df[\"path\"] = manifest_df.col.map(\n        lambda s: s.replace(\"*\", \"\").replace(\" \", \".\")) + \".npz\"\n    out_manifest = os.path.abspath(os.path.join(args.out, \"alleles.csv\"))\n    manifest_df.to_csv(out_manifest, index=False)\n    col_to_filename = manifest_df.set_index(\"col\").path.map(\n        lambda s: os.path.abspath(os.path.join(args.out, s)))\n    print(\"Wrote: \", out_manifest)\n\n    result_df = pandas.DataFrame(\n        index=peptides, columns=manifest_df.col.values, dtype=args.result_dtype)\n    result_df[:] = numpy.nan\n\n    if args.reuse_predictions:\n        # Allocating this here to hit any memory errors as early as possible.\n        is_null_matrix = numpy.ones(\n            shape=(result_df.shape[0], len(alleles)), dtype=\"int8\")\n\n        for dirname in args.reuse_predictions:\n            if not dirname:\n                continue  # ignore empty strings\n            if os.path.exists(dirname):\n                print(\"Loading predictions\", dirname)\n                result_df = load_results(\n                    dirname, result_df, dtype=args.result_dtype)\n            else:\n                print(\"WARNING: skipping because does not exist\", dirname)\n\n        # We rerun any alleles that have nulls for any kind of values\n        # (e.g. 
affinity, percentile rank, elution score).\n        for (i, allele) in enumerate(alleles):\n            sub_df = manifest_df.loc[manifest_df.allele == allele]\n            is_null_matrix[:, i] = result_df[sub_df.col.values].isnull().any(axis=1)\n        print(\"Fraction null\", is_null_matrix.mean())\n\n        print(\"Grouping peptides by alleles\")\n        allele_indices_to_peptides = collections.defaultdict(list)\n        for (i, peptide) in tqdm.tqdm(enumerate(peptides), total=len(peptides)):\n            (allele_indices,) = numpy.where(is_null_matrix[i])\n            if len(allele_indices) > 0:\n                allele_indices_to_peptides[tuple(allele_indices)].append(peptide)\n\n        del is_null_matrix\n\n        work_items = []\n        print(\"Assigning peptides to work items.\")\n        for (indices, block_peptides) in allele_indices_to_peptides.items():\n            num_chunks = int(math.ceil(len(block_peptides) / args.chunk_size))\n            peptide_chunks = numpy.array_split(block_peptides, num_chunks)\n            for chunk_peptides in peptide_chunks:\n                work_items.append({\n                    'alleles': alleles[list(indices)],\n                    'peptides': chunk_peptides,\n                })\n    else:\n        # Same number of chunks for all alleles\n        num_chunks = int(math.ceil(len(peptides) / args.chunk_size))\n        print(\"Splitting peptides into %d chunks\" % num_chunks)\n        peptide_chunks = numpy.array_split(peptides, num_chunks)\n\n        work_items = []\n        for chunk_peptides in peptide_chunks:\n            work_item = {\n                'alleles': alleles,\n                'peptides': chunk_peptides,\n            }\n            work_items.append(work_item)\n    print(\"Work items: \", len(work_items))\n\n    for (i, work_item) in enumerate(work_items):\n        work_item[\"work_item_num\"] = i\n\n    # Combine work items to form tasks.\n    tasks = []\n    peptides_in_last_task = None\n    # We sort work_items to put small items first so they get combined.\n    for work_item in sorted(work_items, key=lambda d: len(d['peptides'])):\n        if peptides_in_last_task is not None and (\n                len(work_item['peptides']) +\n                peptides_in_last_task < args.chunk_size):\n\n            # Add to last task.\n            tasks[-1]['work_item_dicts'].append(work_item)\n            peptides_in_last_task += len(work_item['peptides'])\n        else:\n            # New task\n            tasks.append({'work_item_dicts': [work_item]})\n            peptides_in_last_task = len(work_item['peptides'])\n\n    print(\"Collected %d work items into %d tasks\" % (\n        len(work_items), len(tasks)))\n\n    if args.predictor == \"mhcflurry\":\n        do_predictions_function = do_predictions_mhcflurry\n    else:\n        do_predictions_function = do_predictions_mhctools\n\n    worker_pool = None\n    start = time.time()\n    if serial_run:\n        # Serial run\n        print(\"Running in serial.\")\n        results = (\n            do_predictions_function(**task) for task in tasks)\n    elif args.cluster_parallelism:\n        # Run using separate processes on an HPC cluster.\n        print(\"Running on cluster.\")\n        results = cluster_results_from_args(\n            args,\n            work_function=do_predictions_function,\n            work_items=tasks,\n            constant_data=GLOBAL_DATA,\n            input_serialization_method=\"dill\",\n            result_serialization_method=\"pickle\",\n            
clear_constant_data=True)\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n        results = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, do_predictions_function),\n            tasks,\n            chunksize=1)\n\n    allele_to_chunk_index_to_predictions = {}\n    for allele in alleles:\n        allele_to_chunk_index_to_predictions[allele] = {}\n\n    def write_col(col):\n        out_path = os.path.join(\n            args.out, col_to_filename[col])\n        numpy.savez(out_path, result_df[col].values)\n        print(\n            \"Wrote [%f%% null]:\" % (\n                result_df[col].isnull().mean() * 100.0),\n            out_path)\n\n    print(\"Writing all columns.\")\n    last_write_time_per_column = {}\n    for col in result_df.columns:\n        write_col(col)\n        last_write_time_per_column[col] = time.time()\n    print(\"Done writing all columns. Reading results.\")\n\n    for worker_results in tqdm.tqdm(results, total=len(work_items)):\n        for (work_item_num, col_to_predictions) in worker_results:\n            for (col, predictions) in col_to_predictions.items():\n                result_df.loc[\n                    work_items[work_item_num]['peptides'],\n                    col\n                ] = predictions\n                if time.time() - last_write_time_per_column[col] > 180:\n                    write_col(col)\n                    last_write_time_per_column[col] = time.time()\n\n    print(\"Done processing. Final write for each column.\")\n    for col in result_df.columns:\n        write_col(col)\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n    prediction_time = time.time() - start\n    print(\"Done generating predictions in %0.2f min.\" % (\n        prediction_time / 60.0))\n\n\ndef do_predictions_mhctools(work_item_dicts, constant_data=None):\n    \"\"\"\n    Each tuple of work items consists of:\n\n    (work_item_num, peptides, alleles)\n\n    \"\"\"\n\n    # This may run on the cluster in a way that misses all top level imports,\n    # so we have to re-import everything here.\n    import time\n    import numpy\n    import pandas\n    import numpy.testing\n    import mhctools\n\n    if constant_data is None:\n        constant_data = GLOBAL_DATA\n\n    cols = constant_data['cols']\n    predictor_name = constant_data['args'].predictor\n\n    results = []\n    for (i, d) in enumerate(work_item_dicts):\n        work_item_num = d['work_item_num']\n        peptides = d['peptides']\n        alleles = d['alleles']\n\n        print(\"Processing work item\", i + 1, \"of\", len(work_item_dicts))\n        result = {}\n        results.append((work_item_num, result))\n\n        if predictor_name == \"netmhcpan4-ba\":\n            predictor = mhctools.NetMHCpan4(\n                alleles=alleles,\n                program_name=\"netMHCpan-4.0\",\n                mode=\"binding_affinity\")\n        elif predictor_name == \"netmhcpan4-el\":\n            predictor = mhctools.NetMHCpan4(\n                alleles=alleles,\n                program_name=\"netMHCpan-4.0\",\n                mode=\"elution_score\")\n        elif predictor_name == \"mixmhcpred\":\n            # Empirically determine supported alleles.\n            mixmhcpred_usable_alleles = []\n            unusable_alleles = []\n            for allele in alleles:\n                predictor = mhctools.MixMHCpred(alleles=[allele])\n\n         
       # We use inf not nan to indicate unsupported alleles since\n                # we use nan to indicate incomplete results that still need\n                # to execute.\n                empty_results = pandas.Series(index=peptides,\n                    dtype=numpy.float16)\n                empty_results[:] = float('-inf')\n                try:\n                    predictor.predict_peptides_dataframe([\"PEPTIDESS\"])\n                    mixmhcpred_usable_alleles.append(allele)\n                except ValueError:\n                    unusable_alleles.append(allele)\n                    for col in cols:\n                        result[\"%s %s\" % (allele, col)] = empty_results.values\n\n            print(\"MixMHCpred usable alleles: \", *mixmhcpred_usable_alleles)\n            print(\"MixMHCpred unusable alleles: \", *unusable_alleles)\n            predictor = mhctools.MixMHCpred(alleles=mixmhcpred_usable_alleles)\n            assert mixmhcpred_usable_alleles, mixmhcpred_usable_alleles\n        else:\n            raise ValueError(\"Unsupported\", predictor_name)\n\n        start = time.time()\n        df = predictor.predict_peptides_dataframe(peptides)\n        print(\"Predicted for %d peptides x %d alleles in %0.2f sec.\" % (\n            len(peptides), len(alleles), (time.time() - start)))\n\n        for (allele, sub_df) in df.groupby(\"allele\"):\n            for col in cols:\n                result[\"%s %s\" % (allele, col)] = (\n                    sub_df[col].values.astype(\n                        constant_data['args'].result_dtype))\n    return results\n\n\ndef do_predictions_mhcflurry(work_item_dicts, constant_data=None):\n    \"\"\"\n    Each dict of work items should have keys: work_item_num, peptides, alleles\n\n    \"\"\"\n\n    # This may run on the cluster in a way that misses all top level imports,\n    # so we have to re-import everything here.\n    import time\n    from mhcflurry.encodable_sequences import EncodableSequences\n    from mhcflurry import Class1AffinityPredictor\n\n    if constant_data is None:\n        constant_data = GLOBAL_DATA\n\n    args = constant_data['args']\n\n    assert args.predictor == \"mhcflurry\"\n    assert constant_data['cols'] == [\"affinity\"]\n\n    predictor = Class1AffinityPredictor.load(args.mhcflurry_models_dir)\n\n    results = []\n    for (i, d) in enumerate(work_item_dicts):\n        work_item_num = d['work_item_num']\n        peptides = d['peptides']\n        alleles = d['alleles']\n\n        print(\"Processing work item\", i + 1, \"of\", len(work_item_dicts))\n        result = {}\n        results.append((work_item_num, result))\n        start = time.time()\n        peptides = EncodableSequences.create(peptides)\n        for (i, allele) in enumerate(alleles):\n            print(\"Processing allele %d / %d: %0.2f sec elapsed\" % (\n                i + 1, len(alleles), time.time() - start))\n            for col in [\"affinity\"]:\n                result[\"%s %s\" % (allele, col)] = predictor.predict(\n                    peptides=peptides,\n                    allele=allele,\n                    throw=False,\n                    model_kwargs={\n                        'batch_size': args.mhcflurry_batch_size,\n                    }).astype(constant_data['args'].result_dtype)\n        print(\"Done predicting in\", time.time() - start, \"sec\")\n    return results\n\n\nif __name__ == '__main__':\n    run()\n"
  },
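run_predictors.py writes its results as a peptides.csv file, an alleles.csv manifest, and one .npz value file per "allele kind" column. A sketch of reading a single column back from a finished run, mirroring the script's own load_results (the `load_column` helper and the example paths are illustrative):

```
import os

import numpy
import pandas

def load_column(dirname, col):
    # Same layout run_predictors.py writes: numpy.savez stores the
    # values under the default key "arr_0".
    peptides = pandas.read_csv(os.path.join(dirname, "peptides.csv")).peptide
    manifest = pandas.read_csv(os.path.join(dirname, "alleles.csv"))
    (path,) = manifest.loc[manifest.col == col, "path"].values
    values = numpy.load(os.path.join(dirname, path))["arr_0"]
    return pandas.Series(values, index=peptides, name=col)

# e.g. load_column("predictions/all.netmhcpan4.ba", "HLA-A*02:01 affinity")
```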
  {
    "path": "downloads-generation/data_predictions/write_allele_list.py",
    "content": "\"\"\"\n\"\"\"\nimport sys\nimport argparse\nimport os\n\nimport pandas\nimport tqdm  # progress bar\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"input\",\n    metavar=\"FILE.csv\",\n    help=\"CSV of annotated mass spec hits\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"OUT.txt\",\n    help=\"Out file path\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    df = pandas.read_csv(args.input)\n    print(\"Read peptides\", df.shape, *df.columns.tolist())\n\n    df = df.loc[df.mhc_class == \"I\"]\n\n    hla_sets = df.hla.unique()\n    all_hla = set()\n    for hla_set in hla_sets:\n        all_hla.update(hla_set.split())\n\n    all_hla = pandas.Series(sorted(all_hla))\n    all_hla.to_csv(args.out, index=False, header=False)\n    print(\"Wrote [%d alleles]: %s\" % (len(all_hla), os.path.abspath(args.out)))\n\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "downloads-generation/data_predictions/write_proteome_peptides.py",
    "content": "\"\"\"\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport time\nimport collections\nimport re\nfrom six.moves import StringIO\n\nimport pandas\nimport tqdm  # progress bar\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"input\",\n    metavar=\"FILE.csv\",\n    help=\"CSV of annotated mass spec hits\")\nparser.add_argument(\n    \"reference_csv\",\n    metavar=\"FILE.csv\",\n    help=\"CSV of protein sequences\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"OUT.csv\",\n    help=\"Out file path\")\nparser.add_argument(\n    \"--chromosome\",\n    metavar=\"CHR\",\n    nargs=\"+\",\n    help=\"Use only proteins from the specified chromosome(s)\")\nparser.add_argument(\n    \"--debug-max-rows\",\n    metavar=\"N\",\n    type=int,\n    default=None,\n    help=\"Max rows to process. Useful for debugging. If specified an ipdb \"\n    \"debugging session is also opened at the end of the script\")\nparser.add_argument(\n    \"--lengths\",\n    metavar=\"N\",\n    type=int,\n    nargs=\"+\",\n    default=[8,9,10,11],\n    help=\"Peptide lengths\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    df_original = pandas.read_csv(args.input)\n    df = df_original\n    print(\"Read peptides\", df.shape, *df.columns.tolist())\n\n    reference_df = pandas.read_csv(args.reference_csv, index_col=0)\n    reference_df = reference_df.set_index(\"accession\")\n    print(\"Read proteins\", reference_df.shape, *reference_df.columns.tolist())\n\n    print(\"Subselecting to MHC I hits. Before: \", len(df))\n    df = df.loc[df.mhc_class == \"I\"]\n    print(\"After: \", len(df))\n\n    print(\"Subselecting to gene-associated hits. 
Before: \", len(df))\n    df = df.loc[~df.protein_ensembl_primary.isnull()]\n    print(\"After: \", len(df))\n\n    if args.chromosome:\n        print(\"Subselecting to chromosome(s): \", *args.chromosome)\n        print(\"Before: \", len(df))\n        df = df.loc[df.protein_primary_ensembl_contig.isin(args.chromosome)]\n        print(\"After: \", len(df))\n\n    (flanking_length,) = list(\n        set(df.n_flank.str.len().unique()).union(\n            set(df.n_flank.str.len().unique())))\n    print(\"Flanking length\", flanking_length)\n\n    proteins = df.protein_accession.unique()\n\n    if args.debug_max_rows:\n        proteins = proteins[:args.debug_max_rows]\n\n    print(\"Writing decoys for %d proteins\" % len(proteins))\n\n    reference_df = reference_df.loc[proteins]\n\n    lengths = sorted(args.lengths)\n    rows = []\n    total = len(reference_df)\n    for (accession, info) in tqdm.tqdm(reference_df.iterrows(), total=total):\n        seq = info.seq\n        for start in range(0, len(seq) - min(args.lengths)):\n            for length in lengths:\n                end_pos = start + length\n                if end_pos > len(seq):\n                    break\n                n_flank = seq[\n                    max(start - flanking_length, 0) : start\n                ].rjust(flanking_length, 'X')\n                c_flank = seq[\n                    end_pos : (end_pos + flanking_length)\n                ].ljust(flanking_length, 'X')\n                peptide = seq[start : start + length]\n\n                rows.append((\n                    accession,\n                    peptide,\n                    n_flank,\n                    c_flank,\n                    start\n                ))\n\n    result_df = pandas.DataFrame(\n        rows,\n        columns=[\n            \"protein_accession\",\n            \"peptide\",\n            \"n_flank\",\n            \"c_flank\",\n            \"start_position\",\n        ])\n\n    result_df.to_csv(args.out, index=False)\n    print(\"Wrote: %s\" % os.path.abspath(args.out))\n\n    if args.debug_max_rows:\n        # Leave user in a debugger\n        import ipdb\n        ipdb.set_trace()\n\n\nif __name__ == '__main__':\n    run()\n"
  },
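A quick check of the sliding-window bounds used in write_proteome_peptides.py: a protein of length L yields L - k + 1 peptides of each length k, which is what the inclusive range above produces (the toy sequence is illustrative):

```
seq = "MKTAYIAKQR"  # L = 10
lengths = [8, 9, 10, 11]
peptides = [
    seq[start:start + k]
    for k in lengths
    for start in range(0, len(seq) - k + 1)
]
# 3 eight-mers + 2 nine-mers + 1 ten-mer + 0 eleven-mers
assert len(peptides) == 6
```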
  {
    "path": "downloads-generation/data_published/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Download published non-IEDB MHC I ligand data. Most data has made its way into\n# IEDB but not all. Here we gather up the rest.\n#\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=data_published\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\ndate\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\n\n############################################\n# BINDING AFFINITIES: class I\n############################################\n#\n# Kim et al 2014 [PMID 25017736]\nwget -q https://github.com/openvax/mhcflurry/releases/download/pre-1.1/bdata.2009.mhci.public.1.txt\nwget -q https://github.com/openvax/mhcflurry/releases/download/pre-1.1/bdata.20130222.mhci.public.1.txt\nwget -q https://github.com/openvax/mhcflurry/releases/download/pre-1.1/bdata.2013.mhci.public.blind.1.txt\n\nmkdir ms\n\n############################################\n# MS: Class I\n############################################\n# Bassani-Sternberg, ..., Gfeller PLOS Comp. Bio. 2017 [PMID 28832583]\n# The first dataset is from this work. The second dataset is originally from:\n#   Pearson, ..., Perreault JCI 2016 [PMID 27841757]\n# but was reanalyzed in this work, and we download the reanalyzed version here.\nPMID=28832583\nmkdir -p ms/$PMID\nwget -q https://doi.org/10.1371/journal.pcbi.1005725.s002 -P ms/$PMID # data generated in this work\nwget -q https://doi.org/10.1371/journal.pcbi.1005725.s003 -P ms/$PMID # data reanalyzed in this work\ncd ms/$PMID\nunzip *.s002\nunzip *.s003\nmkdir saved\nmv Dataset*/Dataset*.txt saved\nrm -rf Dataset* *.s002 *.s003 _*\nmv saved/* .\nrmdir saved\ncd ../..\n\n# Bassani-Sternberg, ..., Mann Mol Cell Proteomics 2015 [PMID 25576301]\nPMID=25576301\nmkdir -p ms/$PMID\nwget -q https://www.mcponline.org/highwire/filestream/35026/field_highwire_adjunct_files/7/mcp.M114.042812-4.xlsx -P ms/$PMID\n\n# Mommen, ..., Heck PNAS 2014 [PMID 24616531]\nPMID=24616531\nmkdir -p ms/$PMID\nwget -q https://www.pnas.org/highwire/filestream/615485/field_highwire_adjunct_files/1/sd01.xlsx -P ms/$PMID\n\n# Gloger, ..., Neri Cancer Immunol Immunother 2016 [PMID 27600516]\n# Data extracted from supplemental PDF table.\nPMID=27600516\nmkdir -p ms/$PMID\nwget -q https://github.com/openvax/mhcflurry/releases/download/pan-dev1/27600516.peptides.csv -P ms/$PMID\n\n# Ritz, ..., Fugmann Proteomics 2016 [PMID 26992070]\n# Supplemental zip downloaded from publication\nPMID=26992070\nmkdir -p ms/$PMID\nwget -q https://github.com/openvax/mhcflurry/releases/download/pan-dev1/pmic12297-sup-0001-supinfo.zip -P ms/$PMID\ncd ms/$PMID\nunzip pmic12297-sup-0001-supinfo.zip\ncd ../..\n\n# Shraibman, ..., Admon Mol Cell Proteomics\t2016 [PMID 27412690]\nPMID=27412690\nmkdir -p ms/$PMID\nwget -q https://www.mcponline.org/lookup/suppl/doi:10.1074/mcp.M116.060350/-/DC1/mcp.M116.060350-2.xlsx -P ms/$PMID\n\n# Pearson, ..., Perreault J Clin Invest 2016 [PMID 27841757]\n# Note: we do not use the original data from this publicaton, we use 28832583's reanalysis of it.\n#\n\n# Hassan, ..., van Veelen Mol Cell Proteomics 2015 [PMID 23481700]\nPMID=23481700\nmkdir -p ms/$PMID\nwget -q 
https://www.mcponline.org/highwire/filestream/34681/field_highwire_adjunct_files/1/mcp.M112.024810-2.xls -P ms/$PMID\n\n# Shraibman, ..., Admon Mol Cell Proteomics 2019 [PMID 31154438]\nPMID=31154438\nmkdir -p ms/$PMID\nwget -q https://www.mcponline.org/highwire/filestream/51948/field_highwire_adjunct_files/3/zjw006195963st2.txt -P ms/$PMID\nwget -q https://www.mcponline.org/highwire/filestream/51948/field_highwire_adjunct_files/1/zjw006195963st1.xlsx -P ms/$PMID\n\n# Bassani-Sternberg, ..., Krackhardt Nature Comm. 2016 [PMID 27869121]\nPMID=27869121\nmkdir -p ms/$PMID\nwget -q \"https://static-content.springer.com/esm/art%3A10.1038%2Fncomms13404/MediaObjects/41467_2016_BFncomms13404_MOESM1318_ESM.xlsx\" -P ms/$PMID\n\n# Sarkizova, ..., Keskin Nature Biotechnology 2019 [PMID 31844290]\nPMID=31844290\nmkdir -p ms/$PMID\n# Monoallelic:\nwget -q \"https://static-content.springer.com/esm/art%3A10.1038%2Fs41587-019-0322-9/MediaObjects/41587_2019_322_MOESM3_ESM.xlsx\" -P ms/$PMID\n# Multiallelic:\nwget -q \"https://static-content.springer.com/esm/art%3A10.1038%2Fs41587-019-0322-9/MediaObjects/41587_2019_322_MOESM4_ESM.xlsx\" -P ms/$PMID\n\n\n############################################\n# MS: Class II\n############################################\n# Abelin, ..., Rooney Immunity 2019 [PMID 31495665]\nPMID=31495665\nmkdir -p ms/$PMID\nwget -q https://ars.els-cdn.com/content/image/1-s2.0-S1074761319303632-mmc2.xlsx -P ms/$PMID\n\n\n############################################\n# RNA-seq expression data (TPMs)\n############################################\n# CCLE as processed by expression atlas\nDATASET=expression-atlas-22460905\nmkdir -p expression/$DATASET\nwget -q https://www.ebi.ac.uk/gxa/experiments-content/E-MTAB-2770/resources/ExperimentDownloadSupplier.RnaSeqBaseline/tpms.tsv -P expression/$DATASET\n\n# Human protein atlas\nDATASET=human-protein-atlas\nmkdir -p expression/$DATASET\ncd expression/$DATASET\nwget -q https://www.proteinatlas.org/download/rna_celline.tsv.zip\nwget -q https://www.proteinatlas.org/download/rna_blood_cell_sample_tpm_m.tsv.zip\nwget -q https://www.proteinatlas.org/download/rna_tissue_gtex.tsv.zip\nfor i in $(ls *.zip)\ndo\n    unzip $i\n    rm $i\ndone\ncd ../..\n\n# Melanoma. Original publication\n# Barry, ..., Krummel Nature Medicine 2018 [PMID 29942093].\nDATASET=GSE113126\nmkdir -p expression/$DATASET \ncd expression/$DATASET\nwget -q \"https://www.ncbi.nlm.nih.gov/geo/download/?acc=GSE113126&format=file\" -O GSE113126_RAW.tar\ntar -xvf GSE113126_RAW.tar\nrm GSE113126_RAW.tar\ncd ../..\n\n############################################\n# T cell epitopes: class I\n############################################\n#\n# Koşaloğlu-Yalçın, ..., Peters. Oncoimmunology 2018 [PMID 30377561]\n#\nPMID=30377561\nmkdir -p epitopes/$PMID\nwget -q https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6204999/bin/koni-07-11-1492508-s001.zip -P epitopes/$PMID\ncd epitopes/$PMID\nunzip *.zip\nrm -f *.jpg\ncd ../..\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n\n"
  },
  {
    "path": "downloads-generation/data_published/README.md",
    "content": "# Published datasets\n\nThese datasets are derived from publications and do not change.\n\nTo generate this download run:\n\n```\n./GENERATE.sh\n```\n\nThis download contains the BD2009, BD2013, and BLIND datasets from\n[Dataset size and composition impact the reliability of performance benchmarks for peptide-MHC binding predictions](http://bmcbioinformatics.biomedcentral.com/articles/10.1186/1471-2105-15-241).\n\nBD2013 (augmented with more recent data from IEDB) are used to train the production\nMHCflurry models. BD2009 and BLIND are useful for performing validation on held-out data.\n\nThe other published data sets correspond to the publications indicated in GENERATE.sh.\n\n"
  },
  {
    "path": "downloads-generation/data_references/GENERATE.sh",
    "content": "#!/bin/bash\n#\n#\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=data_references\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\ndate\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\ncp $SCRIPT_DIR/process.py .\n\n\n############################################\n# UNIPROT\n############################################\n#\nwget -q ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/reference_proteomes/Eukaryota/UP000005640_9606.fasta.gz\nwget -q ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/reference_proteomes/Eukaryota/UP000005640_9606.gene2acc.gz\nwget -q ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/reference_proteomes/Eukaryota/UP000005640_9606.idmapping.gz\nwget -q ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/reference_proteomes/Eukaryota/UP000005640_9606_DNA.fasta.gz\nwget -q ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/reference_proteomes/Eukaryota/UP000005640_9606_DNA.miss.gz\nwget -q ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/reference_proteomes/Eukaryota/UP000005640_9606_additional.fasta.gz\nwget -q ftp://ftp.ensembl.org/pub/release-98/gtf/homo_sapiens/Homo_sapiens.GRCh38.98.gtf.gz\n\npython process.py \\\n    UP000005640_9606.fasta.gz UP000005640_9606_additional.fasta.gz \\\n    --id-mapping UP000005640_9606.idmapping.gz \\\n    --ensembl-gtf Homo_sapiens.GRCh38.98.gtf.gz \\\n    --out-csv uniprot_proteins.csv \\\n    --out-index uniprot_proteins.fm\n\nls -lh uniprot_proteins.csv uniprot_proteins.fm\n\nbzip2 uniprot_proteins.csv\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n"
  },
  {
    "path": "downloads-generation/data_references/README.md",
    "content": "# data_mass_spec_annotated\n\nOn OS X, if you encounter problem installing shellinford, try this:\n\n```\nCXXFLAGS=\"-stdlib=libc++\" CPPFLAGS=\"-stdlib=libc++\" pip install shellinford\n```\n"
  },
  {
    "path": "downloads-generation/data_references/process.py",
    "content": "\"\"\"\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport gzip\n\nimport pandas\n\nimport gtfparse\nimport shellinford\nfrom Bio import SeqIO\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"input_paths\",\n    nargs=\"+\",\n    help=\"Fasta files to process\")\nparser.add_argument(\n    \"--out-csv\",\n    required=True,\n    metavar=\"FILE.csv\",\n    help=\"CSV output\")\nparser.add_argument(\n    \"--out-index\",\n    required=True,\n    metavar=\"FILE.fm\",\n    help=\"Index output\")\nparser.add_argument(\n    \"--id-mapping\",\n    required=True,\n    metavar=\"FILE.idmapping.gz\",\n    help=\"Uniprot mapping file\")\nparser.add_argument(\n    \"--ensembl-gtf\",\n    required=True,\n    metavar=\"FILE.gtf.gz\",\n    help=\"Ensembl GTF file\")\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    fm = shellinford.FMIndex()\n    df = []\n    for f in args.input_paths:\n        print(\"Processing\", f)\n        with gzip.open(f, \"rt\") as fd:\n            records = SeqIO.parse(fd, format='fasta')\n            for (i, record) in enumerate(records):\n                seq = str(record.seq).upper()\n                df.append((record.name, record.description, seq))\n                fm.push_back(\"$\" + seq + \"$\")  # include sentinels\n    df = pandas.DataFrame(df, columns=[\"name\", \"description\", \"seq\"])\n\n    print(\"Done reading fastas\")\n    print(df)\n\n    pieces = df.name.str.split(\"|\")\n    df[\"db\"] = pieces.str.get(0)\n    df[\"accession\"] = pieces.str.get(1)\n    df[\"entry\"] = pieces.str.get(2)\n\n    print(\"Annotating using mapping\", args.id_mapping)\n    mapping_df = pandas.read_csv(\n        args.id_mapping, sep=\"\\t\", header=None)\n    mapping_df.columns = ['accession', 'key', 'value']\n\n    for item in [\"Ensembl\", \"Ensembl_TRS\", \"Gene_Name\"]:\n        accession_to_values = mapping_df.loc[\n            mapping_df.key == item\n        ].groupby(\"accession\").value.unique().map(\" \".join)\n        df[item.lower()] = df.accession.map(accession_to_values)\n\n    print(\"Annotating using gtf\", args.ensembl_gtf)\n    gtf_df = gtfparse.read_gtf(args.ensembl_gtf)\n    matching_ensembl_genes = set(gtf_df.gene_id.unique())\n    ensembl_primary = []\n    for ensembls in df.ensembl.fillna(\"\").str.split():\n        result = \"\"\n        for item in ensembls:\n            if item in matching_ensembl_genes:\n                result = item\n                break\n        ensembl_primary.append(result)\n    df[\"ensembl_primary\"] = ensembl_primary\n    print(\"Fraction of records with matching ensembl genes\", (\n            df.ensembl_primary != \"\").mean())\n\n    gene_records = gtf_df.loc[gtf_df.feature == \"gene\"].set_index(\"gene_id\")\n    df[\"primary_ensembl_contig\"] = df.ensembl_primary.map(gene_records.seqname)\n    df[\"primary_ensembl_start\"] = df.ensembl_primary.map(gene_records.start)\n    df[\"primary_ensembl_end\"] = df.ensembl_primary.map(gene_records.end)\n    df[\"primary_ensembl_strand\"] = df.ensembl_primary.map(gene_records.strand)\n\n    print(\"Done annotating\")\n    print(df)\n\n    df.to_csv(args.out_csv, index=True)\n    print(\"Wrote: \", os.path.abspath((args.out_csv)))\n\n    print(\"Building index\")\n    fm.build()\n    fm.write(args.out_index)\n    print(\"Wrote: \", os.path.abspath((args.out_index)))\n\n\nif __name__ == '__main__':\n    run()\n"
  },
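The FM-index written by process.py is what annotate.py consumes through shellinford's search API. A minimal sketch of querying it directly, assuming the uniprot_proteins.csv.bz2 and uniprot_proteins.fm outputs of GENERATE.sh are in the working directory (the query peptide is an arbitrary example):

```
import pandas
import shellinford

reference_df = pandas.read_csv("uniprot_proteins.csv.bz2", index_col=0)
fm = shellinford.FMIndex()
fm.read("uniprot_proteins.fm")

# fm.search returns matches whose doc_id reflects push_back order,
# which matches the row order of the CSV.
for match in fm.search("SIINFEKL"):
    row = reference_df.iloc[match.doc_id]
    print(row["accession"], row["seq"].count("SIINFEKL"))
```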
  {
    "path": "downloads-generation/data_references/requirements.txt",
    "content": "shellinford\nbiopython\ngtfparse\n\n"
  },
  {
    "path": "downloads-generation/data_systemhcatlas/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Download some published MHC I ligands identified by mass-spec\n#\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=data_systemhcatlas\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\n# git rev-parse HEAD\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nwget -q https://systemhcatlas.org/Builds_for_download/180409_master_final.tgz\nmkdir extracted\ntar -xvzf *.tgz -C extracted\nwc -l extracted/*/*.csv\n\n# Write header line\ncat extracted/*/*.csv | head -n 1 > data.csv\n\n# Write concatenated data\ngrep -v SysteMHC_ID extracted/*/*.csv >> data.csv\n\n# Cleanup\nrm -rf extracted *.tgz\nls -lh data.csv\nwc -l data.csv\nbzip2 data.csv\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
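The header handling in GENERATE.sh above (keep one header line, then `grep -v` the header out of the concatenation) assumes every per-build CSV shares the same columns. A pandas sketch of the same concatenation, usable as a cross-check of data.csv when memory allows:

```
import glob

import pandas

frames = [pandas.read_csv(path) for path in glob.glob("extracted/*/*.csv")]
combined = pandas.concat(frames, ignore_index=True)
combined.to_csv("data.csv", index=False)
```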
  {
    "path": "downloads-generation/data_systemhcatlas/README.md",
    "content": "# SysteMHC database dump\n\nThis is a database export of the [SysteMHC Atlas](https://systemhcatlas.org/)\ndownloaded from [here](https://systemhcatlas.org/Builds_for_download/). It is\ndistributed under the ODC Open Database License.\n\nTo generate this download run:\n\n```\n./GENERATE.sh\n```\n"
  },
  {
    "path": "downloads-generation/models_class1/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Model select standard MHCflurry Class I models.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\ncp $SCRIPT_DIR/write_validation_data.py .\n\nmkdir models\n\nGPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\necho \"Detected GPUS: $GPUS\"\n\nPROCESSORS=$(getconf _NPROCESSORS_ONLN)\necho \"Detected processors: $PROCESSORS\"\n\npython ./write_validation_data.py \\\n    --include \"$(mhcflurry-downloads path data_curated)/curated_training_data.with_mass_spec.csv.bz2\" \\\n    --exclude \"$(mhcflurry-downloads path models_class1_unselected)/models/train_data.csv.bz2\" \\\n    --only-alleles-present-in-exclude \\\n    --out-data test.csv \\\n    --out-summary test.summary.csv\n\nwc -l test.csv\n\ntime mhcflurry-class1-select-allele-specific-models \\\n    --data test.csv \\\n    --models-dir \"$(mhcflurry-downloads path models_class1_unselected)/models\" \\\n    --out-models-dir models \\\n    --scoring combined:mass-spec,mse,consensus \\\n    --consensus-num-peptides-per-length 10000 \\\n    --combined-min-models 8 \\\n    --combined-max-models 16 \\\n    --unselected-accuracy-scorer combined:mass-spec,mse \\\n    --unselected-accuracy-percentile-threshold 95 \\\n    --mass-spec-min-measurements 500 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 1\n\ntime mhcflurry-calibrate-percentile-ranks \\\n    --models-dir models \\\n    --num-peptides-per-length 100000 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
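After GENERATE.sh completes, the selected and calibrated models load through the same API run_predictors.py uses. A minimal sketch (the peptides and allele are illustrative):

```
from mhcflurry import Class1AffinityPredictor

predictor = Class1AffinityPredictor.load("models")  # the --out-models-dir above
print(predictor.predict(peptides=["SIINFEKL", "SYFPEITHI"], allele="HLA-A*02:01"))
```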
  {
    "path": "downloads-generation/models_class1/README.md",
    "content": "# Class I allele-specific models (ensemble)\n\nThis download contains trained MHC Class I MHCflurry models.\n\nTo generate this download run:\n\n```\n./GENERATE.sh\n```"
  },
  {
    "path": "downloads-generation/models_class1/write_validation_data.py",
    "content": "\"\"\"\nWrite and summarize model validation data, which is obtained by taking a full\ndataset and removing the data used for training.\n\n\"\"\"\nimport argparse\nimport sys\nfrom os.path import abspath\n\nimport pandas\nimport numpy\nfrom sklearn.model_selection import StratifiedKFold\n\nparser = argparse.ArgumentParser(usage = __doc__)\n\nparser.add_argument(\n    \"--include\",\n    metavar=\"INPUT.csv\",\n    nargs=\"+\",\n    help=\"Input CSV to include\")\nparser.add_argument(\n    \"--exclude\",\n    metavar=\"INPUT.csv\",\n    nargs=\"+\",\n    help=\"Input CSV to exclude\")\nparser.add_argument(\n    \"--out-data\",\n    metavar=\"RESULT.csv\",\n    help=\"Output dadta CSV\")\nparser.add_argument(\n    \"--out-summary\",\n    metavar=\"RESULT.csv\",\n    help=\"Output summary CSV\")\nparser.add_argument(\n    \"--mass-spec-regex\",\n    metavar=\"REGEX\",\n    default=\"mass[- ]spec\",\n    help=\"Regular expression for mass-spec data. Runs on measurement_source col.\"\n    \"Default: %(default)s.\")\nparser.add_argument(\n    \"--only-alleles-present-in-exclude\",\n    action=\"store_true\",\n    default=False,\n    help=\"Filter to only alleles that are present in files given by --exclude. \"\n    \"Useful for filtering to only alleles supported by a predictor, where the \"\n    \"training data for the predictor is given by --exclude.\")\n\n\ndef run(argv):\n    args = parser.parse_args(argv)\n\n    dfs = []\n    for input in args.include:\n        df = pandas.read_csv(input)\n        dfs.append(df)\n    df = pandas.concat(dfs, ignore_index=True)\n    print(\"Loaded data with shape: %s\" % str(df.shape))\n    del dfs\n\n    df = df.ix[\n        (df.peptide.str.len() >= 8) & (df.peptide.str.len() <= 15)\n    ]\n    print(\"Subselected to 8-15mers: %s\" % (str(df.shape)))\n\n    if args.exclude:\n        exclude_dfs = []\n        for exclude in args.exclude:\n            exclude_df = pandas.read_csv(exclude)\n            exclude_dfs.append(exclude_df)\n        exclude_df = pandas.concat(exclude_dfs, ignore_index=True)\n        del exclude_dfs\n\n        df[\"_key\"] = df.allele + \"__\" + df.peptide\n        exclude_df[\"_key\"] = exclude_df.allele + \"__\" + exclude_df.peptide\n        df[\"_excluded\"] = df._key.isin(exclude_df._key.unique())\n        print(\"Excluding measurements per allele (counts): \")\n        print(df.groupby(\"allele\")._excluded.sum())\n\n        print(\"Excluding measurements per allele (fractions): \")\n        print(df.groupby(\"allele\")._excluded.mean())\n\n        df = df.loc[~df._excluded]\n        del df[\"_excluded\"]\n        del df[\"_key\"]\n\n        if args.only_alleles_present_in_exclude:\n            df = df.loc[df.allele.isin(exclude_df.allele.unique())]\n\n    df[\"mass_spec\"] = df.measurement_source.str.contains(args.mass_spec_regex)\n    df.loc[df.mass_spec , \"measurement_inequality\"] = \"mass_spec\"\n\n    if args.out_summary:\n        summary_df = df.groupby(\n            [\"allele\", \"measurement_inequality\"]\n        )[\"measurement_value\"].count().unstack().fillna(0).astype(int)\n        summary_df[\"total\"] = summary_df.sum(1)\n        summary_df.to_csv(args.out_summary)\n        print(\"Wrote: %s\" % args.out_summary)\n\n    if args.out_data:\n        df.to_csv(args.out_data, index=False)\n        print(\"Wrote: %s\" % args.out_data)\n\nif __name__ == '__main__':\n    run(sys.argv[1:])\n"
  },
  {
    "path": "downloads-generation/models_class1_kim_benchmark/GENERATE.sh",
    "content": "#!/bin/bash\n#\nset -x\n\nDOWNLOAD_NAME=models_class1_kim_benchmark\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\nexport PYTHONUNBUFFERED=1\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\ncp $SCRIPT_DIR/curate.py .\ncp $SCRIPT_DIR/write_validation_data.py .\n\ntime python curate.py \\\n    --data-kim2014 \\\n        \"$(mhcflurry-downloads path data_published)/bdata.2009.mhci.public.1.txt\" \\\n    --out-csv train.csv\n\nbzip2 train.csv\n\nmkdir models\ncp $SCRIPT_DIR/class1_pseudosequences.csv .\npython $SCRIPT_DIR/generate_hyperparameters.py > hyperparameters.yaml\n\nGPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\necho \"Detected GPUS: $GPUS\"\n\nPROCESSORS=$(getconf _NPROCESSORS_ONLN)\necho \"Detected processors: $PROCESSORS\"\n\ntime mhcflurry-class1-train-allele-specific-models \\\n    --data \"train.csv.bz2\" \\\n    --allele-sequences class1_pseudosequences.csv \\\n    --hyperparameters hyperparameters.yaml \\\n    --out-models-dir models \\\n    --held-out-fraction-reciprocal 10 \\\n    --min-measurements-per-allele 20 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\ntime python ./write_validation_data.py \\\n    --include \"train.csv.bz2\" \\\n    --exclude \"models/train_data.csv.bz2\" \\\n    --only-alleles-present-in-exclude \\\n    --out-data test.csv \\\n    --out-summary test.summary.csv\n\nwc -l test.csv\n\nmkdir selected-models\ntime mhcflurry-class1-select-allele-specific-models \\\n    --data test.csv \\\n    --models-dir models \\\n    --out-models-dir selected-models \\\n    --scoring combined:mse,consensus \\\n    --consensus-num-peptides-per-length 10000 \\\n    --combined-min-models 8 \\\n    --combined-max-models 16 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 5\n\ntime mhcflurry-calibrate-percentile-ranks \\\n    --models-dir selected-models \\\n    --num-peptides-per-length 100000 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
  {
    "path": "downloads-generation/models_class1_kim_benchmark/README.md",
    "content": "# Kim benchmark\n\nThis download trains MHCflurry predictors using the BD2009 dataset from \n[Dataset size and composition impact the reliability of performance benchmarks for peptide-MHC binding predictions](http://bmcbioinformatics.biomedcentral.com/articles/10.1186/1471-2105-15-241). The trained predictor can be evaluated on the BLIND test set from this publication.\n\nWe include this download as a resource for others wishing to benchmark new\nbinding predictors. It's also a working example of a complete MHCflurry\ntraining and model selection run on a simple dataset. See\n[GENERATE.sh](./GENERATE.sh).\n\n"
  },
  {
    "path": "downloads-generation/models_class1_kim_benchmark/class1_pseudosequences.csv",
    "content": "allele,pseudosequence\nHLA-A*01:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:02,YSAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:03,YFAMYQENMAHTDANTLYIMYRDYTWVARVYRGY\nHLA-A*01:06,YFAMYQENMAHTDANTLYIIYRDYTWVALAYRGY\nHLA-A*01:07,YFAMYQENVAHTDENTLYIIYRDYTWVARVYRGY\nHLA-A*01:08,YFAMYQENMAHTDANTLYIIYRDYTWVARVYWGY\nHLA-A*01:09,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:10,YFAMYQENMAHTDANTLYIIYRDYTWARRVYRGY\nHLA-A*01:12,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A*01:13,YFAMYQENMAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*01:14,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTGY\nHLA-A*01:17,YFAMYQENMAQTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:19,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A*01:20,YSAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A*01:21,YFAMYQENMAHTDANTLYIIYRDYTWAVRVYRGY\nHLA-A*01:23,YFAMYQENVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:24,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:25,YFAMYQENMAHTDANTLYIIYRDYTWVAQVYRGY\nHLA-A*01:26,YFAMYQENMAHTDANTLYIIYRDYTWAARVYRGY\nHLA-A*01:28,YFAMYQENMAHTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*01:29,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:30,YFAMYQENMAHTDANTLYIIYHYYTWVARVYRGY\nHLA-A*01:32,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:33,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:35,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:36,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:37,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:38,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:39,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:40,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:41,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:42,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:43,YYAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:44,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:45,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:46,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:47,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:48,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:49,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:50,YFAMYQENMAHTDANTLYIIYREYTWVARVYRGY\nHLA-A*01:51,YFAMYRNNVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:54,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:55,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:58,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:59,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:60,YFAMYPENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:61,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:62,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:63,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:64,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:65,YFAMYQENMAHTDANTLYIIYRDYTWVARVCRGY\nHLA-A*01:66,YFAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A*02:01,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:02,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:03,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:04,YFAMYGEKVAHTHVDTLYVMYHYYTWAVLAYTWY\nHLA-A*02:05,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:06,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:07,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:08,YYAMYGENVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:09,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:10,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:11,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:12,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:13,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYTWY\nHLA-A*02:14,YYAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A*02:16,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A*02:17,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:18,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:19,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:20,YFAMYGENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:21,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:22,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A*02:24,
YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:25,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:26,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:27,YFAMYGEKVAHTHVDTLYVRYHYYTWAAQAYTWY\nHLA-A*02:28,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:29,YFAMYGEQVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:30,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:31,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:33,YFAMYGEKVAHTHVDTLYVRSHYYTWAVLAYTWY\nHLA-A*02:34,YFAMYGEKVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:35,YFAMYGEKVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:36,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTGY\nHLA-A*02:37,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:38,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYRWY\nHLA-A*02:39,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:40,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:41,YYAMYGEKVAHTHVDTLYVRYQYYTWAVLAYTWY\nHLA-A*02:42,YFSMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:44,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:45,YFAMYQEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:46,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:47,YFAMYGEKVAHSHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:48,YFAMYEEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:49,YFAMYGEKVAHTHVDTLYVRYHYYTWAVRAYTWY\nHLA-A*02:50,YFAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*02:51,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:52,YFAMYGEKVAHTHVDTLYVRYEHYTWAVLAYTWY\nHLA-A*02:54,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:55,YFAMYRNNVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:56,YFAMYQENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:57,YYAMYGEKVAHTHVDTLYLMYHYYTWAVLAYTWY\nHLA-A*02:58,YFAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A*02:59,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:60,YFAMYGEKVAHTHVDTLYVRYHFYTWAVLAYTWY\nHLA-A*02:61,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:62,YFAMYGENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:63,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:64,YFAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A*02:65,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*02:66,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:67,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:68,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:69,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:70,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:71,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:72,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:73,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A*02:74,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:75,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:76,YSAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:77,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:78,YYAMYQENVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:79,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:80,YFAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A*02:81,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A*02:84,YYAMYGEKVAHTHVDTLYFRYHYYTWAVLAYTWY\nHLA-A*02:85,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:86,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:87,YFAMYGEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A*02:89,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:90,YFAMYGEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:91,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:92,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:93,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:95,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:96,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:97,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:99,YYAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:101,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYRWY\nHLA-A*02:102,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:103,YFAMYQENVAQTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:104,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A*02:105,YFAMYGEKVAHTHVDTLYVRYEYYTWAVLAYTWY\nHLA-A*02:106,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:
107,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:108,YYAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:109,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:110,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:111,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:112,YFAMYGEKVAHTDENIAYVRCHYYTWAVLAYTWY\nHLA-A*02:114,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A*02:115,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:116,YFAMYGEKVAHTHLDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:117,YFAMYGEKVAHTHVDTLYVRYQDYTWAEWAYTWY\nHLA-A*02:118,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:119,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:120,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:121,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:122,YYAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*02:123,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:124,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A*02:126,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:127,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A*02:128,YFAMYGENVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:129,YYAMYEEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A*02:130,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:131,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A*02:132,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:133,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:134,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:135,YFAMYGEKVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*02:136,YFAMYGEKVAHTDENIAYVRYHYYTWAVWAYTWY\nHLA-A*02:137,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:138,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:139,YFAMYGEKVTHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:140,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:141,YFVMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:142,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:143,YYAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:144,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:145,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:146,YFAMYGEKVAHTDANTLYVRYHYYTWAVLAYTWY\nHLA-A*02:147,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A*02:148,YFAMYGEKVAHTHVDTLYVRFHYYTWAEWAYTWY\nHLA-A*02:149,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:150,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:151,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A*02:152,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*02:153,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:154,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYRWY\nHLA-A*02:155,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:156,YFAMYGEKVAHTHVDTLYIIYHYYTWAVLAYTWY\nHLA-A*02:157,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:158,YFAMYGEKVAHAHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:159,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:160,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:161,YFAVYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:162,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:163,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:164,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:165,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:166,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:167,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A*02:168,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:169,YYAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:170,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:171,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:172,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:173,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:174,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:175,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:176,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:177,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:178,YYAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A*02:179,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:180,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:181,YFAMYGEKVAHTHVDTLYVRYHY
YTWAVLAYTWY\nHLA-A*02:182,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:183,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:184,YFAMYGEKVAHTHEDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:185,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:186,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:187,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:188,YFAMYGEKVAHTHVDTLYVRYDSYTWAVLAYTWY\nHLA-A*02:189,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:190,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:191,YFAMYGEKVAHTHVDTLYVRCHYYTWAVWAYTWY\nHLA-A*02:192,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:193,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:194,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:195,YFAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:196,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:197,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:198,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:199,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:200,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:201,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:202,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:203,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:204,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:205,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:206,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:207,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:208,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:209,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:210,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:211,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:212,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:213,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:214,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:215,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:216,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:217,YFAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:218,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:219,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:220,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:221,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:224,YFAMYGEKVAHTHVDTLYVGYHYYTWAVLAYTWY\nHLA-A*02:228,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:229,YYAMYGEKVAHTHVDTLYLRYRYYTWAVWAYTWY\nHLA-A*02:230,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:231,YFAMYGEKVAHTHVDTLYVRNHYYTWAVLAYTWY\nHLA-A*02:232,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:233,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTRY\nHLA-A*02:234,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:235,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:236,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:237,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:238,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:239,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:240,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:241,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:242,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:243,YTAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:244,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:245,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A*02:246,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A*02:247,YFAMYGEKVAHTDENTLYVRYHYYTWAVLAYTWY\nHLA-A*02:248,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:249,YFAMYVEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:251,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:252,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:253,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:254,YFAMYGEKVAHTHVDTLYVRYNFYTWAVLAYTWY\nHLA-A*02:255,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTGY\nHLA-A*02:256,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:257,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:258,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:259,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:260,Y
FAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:261,YFAMYGEKVAHTHMDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:262,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLVYTWY\nHLA-A*02:263,YFAMYGEKVAHTHVDTLYVRYHYYTWSVLAYTWY\nHLA-A*02:264,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:265,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:266,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*03:01,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:02,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:04,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:05,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:06,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:07,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*03:08,YFAMYQENVAHTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:09,YFAMYQENVAQTHVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:10,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:12,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:13,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:14,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:15,YFAMYQENVAQTDVDTLYIIFRDYTWAELAYTWY\nHLA-A*03:16,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:17,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:18,YFAMYQENVAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*03:19,YFAMYQENVAQTDVDTLYIIFHYYTWAELAYTWY\nHLA-A*03:20,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:22,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:23,YFAMYGEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:24,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:25,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:26,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:27,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:28,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:29,YFAMYQENVVQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:30,YFAMYEEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:31,YFAMYQENVAQTDVDTLYIIYRYYTWAVQAYTWY\nHLA-A*03:32,YFAMYQENVAHIDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:33,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:34,YFAMYQENVAPTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:35,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:37,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:38,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:39,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:40,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:41,YFAMYQENVAHTDANTLYIIYRDYTWAELAYTWY\nHLA-A*03:42,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*03:43,YFAMYQENVAQTDVDTLYIIYEHYTWAELAYTWY\nHLA-A*03:44,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:45,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:46,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:47,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:48,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:49,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:50,YFAMYQENVAQTDVDTLYIIYRDYTWAEWAYTWY\nHLA-A*03:51,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:52,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:53,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:54,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:55,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:56,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:57,YFAMYQENVAQTDANTLYIIYRDYTWAELAYTWY\nHLA-A*03:58,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:59,CFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:60,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:61,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:62,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:63,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:64,YFAMYQENVAQTDVDTLYIIYRDYTWADLAYTWY\nHLA-A*03:65,YFAMYQENVAQTDVDTLYIIYRDYTWAEQAYTWY\nHLA-A*03:66,YFAMYQENVAQTDVDTLYIIYRDYTWAERAYTWY\nHLA-A*03:67,YFATYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:70,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:71,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:72,YSAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:73,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:7
4,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:75,YFAMYQENVAQTDVDTLYLMYRDYTWAELAYTWY\nHLA-A*03:76,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:77,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:78,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:79,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:80,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:81,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:82,YFAMYQENVAQTDVDTLYIIYEHYTWAVQAYTWY\nHLA-A*11:01,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:02,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:03,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A*11:04,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTWY\nHLA-A*11:05,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:06,YYAMYQENVAQTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:07,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:08,YYAMYQENVAQTDVDTLYIIYRDYTWAERAYRWY\nHLA-A*11:09,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:10,YYAMYRNNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:11,YYAMYLQNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:12,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:13,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:14,YYAMYQENVAQTDVDTLYIIYRDYTWARQAYRWY\nHLA-A*11:15,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:16,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:17,YYAMYQENMAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A*11:18,YYAMYQENVAHTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:19,YYAMYQENVAHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:20,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A*11:22,YYAMYQENVAQTDVDTLYIIYPDYTWAAQAYRWY\nHLA-A*11:23,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:24,YYAMYQENVAQTDVDTLYIIYRDYTWAALAYRWY\nHLA-A*11:25,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYRWY\nHLA-A*11:26,YYAMYQENVAQTDVDTLYIMYRDYTWAAQAYRWY\nHLA-A*11:27,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTGY\nHLA-A*11:29,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:30,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:31,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYRWY\nHLA-A*11:32,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:33,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:34,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:35,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*11:36,YYAMYQENVAQTDVDTLYIICRDYTWAAQAYRWY\nHLA-A*11:37,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:38,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A*11:39,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A*11:40,YYAMYQENVAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A*11:41,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:42,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:43,YTAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:44,YYAMYQENVAQTDVDTLYIIYRDYTWAARAYRWY\nHLA-A*11:45,YYAMYQENVAQTDADTLYIIYRDYTWAAQAYRWY\nHLA-A*11:46,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:47,YHAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:48,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:49,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:51,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:53,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:54,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:55,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:56,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:57,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:58,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:59,YYAMYQENVAQTDVDTLYIIYGDYTWAAQAYRWY\nHLA-A*11:60,YYAMYQENVAQTDVDTLYIIYRDYTWAVQAYRWY\nHLA-A*11:61,YYAMYQENAAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:62,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:63,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:64,YYAMYQENVAQTDVDTLHIIYRDYTWAAQAYRWY\nHLA-A*23:01,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A*23:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:04,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A*23:05,CSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:06,Y
SAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:09,YSAMYQENMAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYRGY\nHLA-A*23:12,YSAMYEEKVAHTHENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:13,YSAMYEEKVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:14,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:15,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:16,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:17,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:18,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:24,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:25,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:04,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:05,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:06,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A*24:07,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:08,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRWY\nHLA-A*24:13,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:14,YSAMYEEKVAHTDENIAYVRYHYYTWAVQAYTGY\nHLA-A*24:15,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A*24:17,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A*24:18,YSAMYEEKVAHTDENIAYLMFHYYTWAELAYTWY\nHLA-A*24:19,YSAMYEEKVAQTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVWVYTWY\nHLA-A*24:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:24,YSAMYRNNVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:25,CSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:27,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:28,YSAMYEEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:29,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:30,YSAMYEEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:31,YSAMYEQKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:32,YSAMYEEKVAHTDESIAYLMFHYYTWAVQAYTGY\nHLA-A*24:33,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:34,YSAMYEEKVAHIDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:35,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:37,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:38,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:39,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:41,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A*24:42,YSAMYGEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:43,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:44,YSAMYEEKVAHTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:46,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRGY\nHLA-A*24:47,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:49,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:50,YYAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:51,YSAMYEEKVAHTDENIAYLIYHYYTWAVQAYTGY\nHLA-A*24:52,YSAMYEEKVAHTDENIAYLRFHYYTWAVQAYTGY\nHLA-A*24:53,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A*24:54,YSAMYEEKVAHTDENIAYLMFHYYTWAVQPYTGY\nHLA-A*24:55,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYEGY\nHLA-A*24:56,YSAMYEEKVAHTDENIAYLMFHYYTWAEQAYTGY\nHLA-A*24:57,YSAMYEEKVAHTDENIAYIMYHYYTWAVQAYTGY\nHLA-A*24:58,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:59,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYTGY\nHLA-A*24:61,YSAMYEEKVAHTDEKIAYLMFHYYTWAVQAYTGY\nHLA-A*24:62,YSAMYEEKVAHTDENIAYLMFQDYTWAVQAYTGY\nHLA-A*24:63,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:64,YSAMYEEKVAHTDENIAYLWIHYYTWAVQAYTGY\nHLA-A*24:66,YSAMYEEKVAHTDENIAYLMFEHYTWAVQAYTGY\nHLA-A*24:67,YSAMYRNNVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:68,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:69,YSAM
YEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:70,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:71,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:72,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:73,YSAMYEEKVAHTDENIAYLMFDYYTWAVQAYTGY\nHLA-A*24:74,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:75,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:76,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:77,YSAMYQEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:78,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:79,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:80,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:81,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:82,YTAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:85,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:87,YSAMYEEKVAHTDENIAYLMFHYYTWAVRAYTGY\nHLA-A*24:88,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:89,YSAMYGEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:91,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:92,YSAMYEEKVAHTDENIAYIIYHYYTWAVQAYTGY\nHLA-A*24:93,YSAMYEEKVAHTDENIAYVMFHYYTWAVQAYTGY\nHLA-A*24:94,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A*24:95,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:96,YSAMYEEKVAHTDENIAYLMFNFYTWAVQAYTGY\nHLA-A*24:97,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:98,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:99,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:100,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:101,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:102,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:103,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:104,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:105,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:106,YSAMYEEKVAHTDENIAYLMFDDYTWAVQAYTGY\nHLA-A*24:107,YSAMYEEKVAHTDENIAYLMFHYYTWAVHAYTGY\nHLA-A*24:108,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:109,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:110,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:111,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:112,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:113,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:114,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:115,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:116,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:117,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:118,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:119,YSAMYEEKVAHADENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:120,YSAMYEEKVAHTDENIAYIMFHYYTWAVQAYTGY\nHLA-A*24:121,YSAMYEEKVAHTDENIAYLMFHSYTWAVQAYTGY\nHLA-A*24:122,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:123,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:124,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:125,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:126,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:127,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:128,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:129,YSAMYQENMAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:130,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:131,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:133,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:134,YSAMYEEKVAHTDENIAYLMFHYYPWAVQAYTGY\nHLA-A*24:135,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:136,YSAMYEEKVAHTDENIAYLMFHYYTWVVQAYTGY\nHLA-A*24:137,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:138,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTWY\nHLA-A*24:139,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:140,YSTMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:141,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:142,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:143,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYLGY\nHLA-A*24:144,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*25:01,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:02,YYAMYRNNVAQTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:03,YFAMYRNNVAH
TDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:04,YYAMYRNNVAHTDESIAYIRYQDYTWAEQAYRWY\nHLA-A*25:05,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:06,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYTWY\nHLA-A*25:07,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:08,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:09,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:10,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:11,YYAMYRNNVAHTHESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:13,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*26:01,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:02,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A*26:03,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:04,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYLWY\nHLA-A*26:05,YYAMYRNNVAHTDENTLYIRYQDYTWAEWAYRWY\nHLA-A*26:06,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:07,YYAMYGEKVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:08,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A*26:09,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYTWY\nHLA-A*26:10,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:12,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A*26:13,YYAMYRNNVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:14,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:15,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:16,YSAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:17,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:18,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A*26:19,YYAMYQENVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:20,YFAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:21,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:22,YYAMYRNNVAHTDANTLYVRYQDYTWAEWAYRWY\nHLA-A*26:23,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:24,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:26,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:27,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:28,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:29,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A*26:30,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:31,YYAMYPNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:32,YYAMYRNNVAHTDANTLYMVYQDYTWAEWAYRWY\nHLA-A*26:33,YYAMYRNNVAQIHANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:34,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYWWY\nHLA-A*26:35,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:36,YYAMYRNNVAHTHANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:37,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:38,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:39,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:40,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:41,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:42,YYAIYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:43,YYAMYRNNVAHTDANTLYIRYQDYTWAELAYRWY\nHLA-A*26:45,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:46,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:47,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:48,YYAMYRNKVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A*26:49,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A*26:50,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*29:01,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:02,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:03,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTGY\nHLA-A*29:04,YTAMYLQHVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:05,YTAMYLQNVAQTDANTLYIMYRDYTWAEQAYTWY\nHLA-A*29:06,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:07,YTAMYLQNVAQTDANTLYLMFRDYTWAVLAYTWY\nHLA-A*29:09,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:10,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:11,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:12,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:13,YTAMYLQNVAQTDESIAYIMYRDYTWAVLAYTWY\nHLA-A*29:14,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:15,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:16,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:17,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:18,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:19,YTAMYLQNVAHTHV
DTLYIMYRDYTWAVLAYTWY\nHLA-A*29:20,YTAMYLQNVAHTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:21,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:22,YTAMYLQNVAQTDANTLYVRYRDYTWAVLAYTWY\nHLA-A*30:01,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:02,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:03,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:04,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A*30:06,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A*30:07,YSAMYEEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:08,YYAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:09,YSAMYQENVAHTDENTLYIIYEHYTWAVLAYTWY\nHLA-A*30:10,YSAMYQENVAHTDENTLYIIHEHYTWARLAYTWY\nHLA-A*30:11,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:12,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:13,YSAMYQENVAHTHVDTLYIIYEHYTWARLAYTWY\nHLA-A*30:15,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:16,YSAMYQENVAQTHVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:17,YSAMYQENVAQTDVDTLYIIYEHYTWAVWAYTWY\nHLA-A*30:18,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:19,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:20,YSAMYQENVAQTEVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:22,YSAMYGEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:23,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:24,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:25,YSAMYQENVAQTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:26,YSAMYQENVAQTDVDTLYIIYRDYTWAWLAYTWY\nHLA-A*30:28,YSAMYQENVAHTDENTLYIVYEHYTWARLAYTWY\nHLA-A*30:29,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTSY\nHLA-A*30:30,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:31,YSAMYQENVARTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:32,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:33,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:34,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:35,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:36,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:37,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:38,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:39,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:40,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:41,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*31:01,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:02,YTAMYQEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:03,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*31:04,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*31:05,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A*31:06,YTAMYQENVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*31:07,YTAMYQEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:08,YTAMYEEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:09,YTAMYQENVGHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:10,YTAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:11,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:12,YTAMYQENVAHIDVDTLYIKYQDYTWAVLAYTWY\nHLA-A*31:13,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:15,YTAMYQENVARIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:16,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:17,YTAMYQENVAHINVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:18,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTRY\nHLA-A*31:19,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:20,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:21,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:22,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:23,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:24,YTAMYQENVAHIDVDTLYIMYQDYTWAAQAYRWY\nHLA-A*31:25,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYRWY\nHLA-A*31:26,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:27,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:28,YTAMYQENVTHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:29,YTAMYQENVAHIDVDTLYLMFQDYTWAVLAYTWY\nHLA-A*31:30,YTAMYQENVAHIDVDTLYIMYQDYTWAVWAYTWY\nHLA-A*31:31,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:32,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:33,YTAMYQENVAHIDGDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:34,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:35,YTAMYQENVAHIDVDTL
YIMYQDYTWAVLAYTWY\nHLA-A*31:36,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:37,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*32:01,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:02,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A*32:03,YFAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*32:04,YFAMYQENVAHTDESIAYIIYRDYTWAELAYTWY\nHLA-A*32:05,YFAMYQEKVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:06,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:07,YSAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:08,YFAMYQENVAHTHESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:09,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTRY\nHLA-A*32:10,YFAMYQENVAHTDESIAYIMYQDYTWAEWAYTWY\nHLA-A*32:12,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:13,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTGY\nHLA-A*32:14,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:15,YFAMYRNNVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:16,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:17,YFAMYQENVAQTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:18,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:20,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:21,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:22,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A*32:23,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:24,YFAMYQENMAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:25,YFAMYHENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*33:01,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:03,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:04,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:05,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:06,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:07,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:08,YTAMYGEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:09,YTAMYGENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:10,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A*33:11,YTAMYRNNVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:12,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:13,YTAMYRNNVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A*33:14,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:15,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:16,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:17,YTAMYRNNVAHIDADTLYIMYQDYTWAVLAYTWY\nHLA-A*33:18,YTAMYRNNVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*33:19,YTAMYRNNVAHIDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*33:20,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:21,YTAMYEENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:22,YTAMYRNNVAHIDVDTLYVRYQDYTWAVLAYTWY\nHLA-A*33:23,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:24,YTAMYRNNVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:25,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:26,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:27,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:28,YTAMYRNNVAHIDVDTLYIMYQDYTWAELAYTWY\nHLA-A*33:29,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:30,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:31,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*34:01,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A*34:02,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:03,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:04,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:05,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A*34:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYTWY\nHLA-A*34:07,YYAMYRNNVSQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:08,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*36:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A*36:02,YFAMYQENMAHTDANTLYIIYRDYTWVARAYTWY\nHLA-A*36:03,YFAMYQENMAHTDANTLYLMYRDYTWVARVYTWY\nHLA-A*36:04,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRWY\nHLA-A*36:05,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A*43:01,YYAMYLQNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*66:01,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:02,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A*66:03,YYAMYRNNVAHTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A*66:04,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWH\nHLA-A*66:05,YYAMYRNNVAHTDVDTLYIR
YQDYTWAEWAYRWY\nHLA-A*66:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYRWY\nHLA-A*66:07,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:08,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:09,YYAMYRNNVAQTDVDTLYVRYQDYTWAEWAYRWY\nHLA-A*66:10,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRGY\nHLA-A*66:11,YYAMYRNNVAQTDADTLYIRYQDYTWAEWAYRWY\nHLA-A*66:12,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:13,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:14,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:15,YYAMYRNNVAHIDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*68:01,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:02,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:03,YYAMYRNNVAHTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:04,YYAMYRNNVAHIDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:05,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:06,YYAMYRNNVAQTDVDTLYIMYEHYTWAVWAYTWY\nHLA-A*68:07,YYAMYRNNVAQTDVDTLYIMYRHYTWAVWAYTWY\nHLA-A*68:08,YYAMYRNNVAQTDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*68:09,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTWY\nHLA-A*68:10,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:12,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:13,YYAMYRENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:14,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:15,YYAMYRNNVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:16,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:17,YYAMYRNNVAQTDVDTLYIMYRVYTWAVWAYTWY\nHLA-A*68:19,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:20,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:21,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:22,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:23,YYAMYRNNVAQTDVDTLYIRYRDYTWAVWAYTWY\nHLA-A*68:24,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:25,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:26,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTGY\nHLA-A*68:27,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:28,YYAMYRNNVAQTDVDTLYIRYHYYTWAVRAYTWY\nHLA-A*68:29,YTAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:30,YYAMYGENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:31,YYAMYRNNVAHTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:32,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:33,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:34,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYMWY\nHLA-A*68:35,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:36,YYAMYRNNVAQTDENIAYIMYRDYTWAVWAYTWY\nHLA-A*68:37,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:38,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:39,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:40,YYAMYRNNVGQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:41,YYAMYRNNVAQTDVDTLYIMYRDYTWVVWAYTWY\nHLA-A*68:42,YYAMYRNNVAQTDVDTLYIMYRDYTWAEWAYTWY\nHLA-A*68:43,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:44,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:45,YSAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:46,YYAMYRNNVAQTDVNTLYIMYRDYTWAVWAYTWY\nHLA-A*68:47,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:48,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:50,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:51,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:52,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:53,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:54,YYAMYRNNVAQTDVDTLYIRYHYYTWAEWAYTWY\nHLA-A*69:01,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*74:01,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:02,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:03,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:04,YFAMYGEKVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:05,YFAMYQENVAHADVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:06,YFAMYQENVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:07,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:08,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:09,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:10,YFAMYQENVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A*74:11,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:13,YFAMYQENVAQTDVDTLYIMYQD
YTWAVLAYTWY\nHLA-A*80:01,YFAMYEENVAHTNANTLYIIYRDYTWARLAYEGY\nHLA-A*80:02,YFAMYEENVAHTDVDTLYIIYRDYTWARLAYEGY\nHLA-B*07:02,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:03,YYSEYRNIYTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:04,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYEWY\nHLA-B*07:05,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:06,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:07,YYSEYRNIYAQTDESNLYLRYDYYTWAERAYEWY\nHLA-B*07:08,YYSEYRNIFTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:09,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B*07:10,YYSEYRNICAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:11,YYSEYRNIYAQTDENNLYLSYDSYTWAERAYEWY\nHLA-B*07:12,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:13,YYSGYREKYRQADVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:14,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:15,YYSEYRNIYAQADVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:16,YYSEYRNIYTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:17,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B*07:18,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:19,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYTWY\nHLA-B*07:20,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLSY\nHLA-B*07:21,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:22,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:23,YYSEYRNIYAQTDESNLHLSYDYYTWAERAYEWY\nHLA-B*07:24,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B*07:25,YYSEYRNIYAQTDESNLYLSYDYYTWAVDAYEWY\nHLA-B*07:26,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYEWY\nHLA-B*07:27,YYSEYRNISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B*07:28,YYSEYRNIYAQTDESNLYLSYDDYTWAERAYEWY\nHLA-B*07:29,YDSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:30,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:31,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYTWY\nHLA-B*07:32,YYSEYRNIFTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:33,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:34,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYTWY\nHLA-B*07:35,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:36,YYSEYRNIYAQTDENIAYLSYDYYTWAERAYEWY\nHLA-B*07:37,YYSEYRNIYANTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:38,YYSEYRNIFTNTYENIAYLSYDYYTWAERAYEWY\nHLA-B*07:39,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:40,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:41,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:42,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:43,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYTWY\nHLA-B*07:44,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:45,YYSEYRNIYAQTDESNLYLSYDYYTWAERTYEWY\nHLA-B*07:46,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:47,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:48,YYSEYRNIYAQTDESNLYLSYDYYTWAVLAYEWY\nHLA-B*07:50,YYSEYRNISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:51,YYSEYRNIYAQTDESNLYLSYDYYTWAARAYEWY\nHLA-B*07:52,YYSEYRNIYAQTDESNLYLSYDYYTWAERVYEWY\nHLA-B*07:53,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:54,YYSEYREIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:55,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYEWY\nHLA-B*07:56,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:57,YYSEYRNIYAQTDENNLYLSYDYYTWAERAYEWY\nHLA-B*07:58,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:59,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:60,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B*07:61,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:62,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:63,YYSDYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:64,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEGY\nHLA-B*07:65,YYATYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:66,YYSEYRNIYAQTDESNLYLSYDYYTWAEQAYEWY\nHLA-B*07:68,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:69,YYSEYRNICTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:70,YCSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:71,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:72,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:73,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:74,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:75,YYSEYRNIYAQTYENNLYLSYDYYTW
AERAYEWY\nHLA-B*07:76,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:77,YYSEYRNIYAQTDESNLYLRSDYYTWAERAYEWY\nHLA-B*07:78,YYSEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B*07:79,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:80,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYEWY\nHLA-B*07:81,YYSEYRNIYAQTDESIAYLSYDYYTWAERAYEWY\nHLA-B*07:82,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:83,YYSEYRNIFAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:84,YYSEYRNIYAQTDESNLYWTYDYYTWAERAYEWY\nHLA-B*07:85,YYSEYRNICTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:86,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:87,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:88,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:89,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:90,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:91,YYSEYRNIYAQTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:92,YYSEYRNIYAQTDVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:93,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:94,YYSEYWNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:95,YYSEYRNIYAQTDESNLYFSYDYYTWAERAYEWY\nHLA-B*07:96,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:97,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:98,YYSEYRNIYAQTDESNLYLSYDYYTCAERAYEWY\nHLA-B*07:99,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:100,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYLWY\nHLA-B*07:101,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:102,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:103,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:104,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:105,YYSEYRNIYAQTVESNLYLSYNYYTWAERAYEWY\nHLA-B*07:106,YYSEYRNIYAQTDESNLYLSYDYYTRAERAYEWY\nHLA-B*07:107,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:108,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:109,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:110,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:112,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:113,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:114,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:115,YYSEYRNIYAQTDESNLYLSYNFYTWAERAYEWY\nHLA-B*08:01,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:02,YDSEYRNIFTNTDENTAYLSYNYYTWAVDAYTWY\nHLA-B*08:03,YDSEYRNIFTNTYENIAYLSYNYYTWAVDAYTWY\nHLA-B*08:04,YDSEYRNISTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:05,YDSEYRNTFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:07,YDSEYRNIFTNTDESNLYLSYDYYTWAVDAYTWY\nHLA-B*08:09,YDSEYRNIFTNTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*08:10,YDSEYRDIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:11,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYAWY\nHLA-B*08:12,YDSEYRNIFTNTDESNLYLRYNYYTWAVDAYTWY\nHLA-B*08:13,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYTWY\nHLA-B*08:14,YDSEYRNIFTNTDESNLYLSYHDYTWAVDAYTWY\nHLA-B*08:15,YDSEYRNIFTNTDVSNLYLSYNYYTWAVDAYTWY\nHLA-B*08:16,YDSEYRNIFTNADESNLYLRYNYYTWAVDAYTWY\nHLA-B*08:17,YDSEYREISTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B*08:18,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:20,YDSEYRNIFTNTDESNLYLSYNYYTWAERAYTWY\nHLA-B*08:21,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYLWY\nHLA-B*08:22,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:23,YDSEYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:24,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:25,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYLWY\nHLA-B*08:26,YYAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:27,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:28,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*08:29,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:31,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:32,YDSTYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:33,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:34,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:35,YDSEYRNIFTNTDESNLYLSYNSYTWAVDAYTWY\nHLA-B*08:36,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:37,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*08:38,YDSEYREIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:39,YDSEYRNIFTNTDE
SNLYLSYNYYTWAVDAYTWY\nHLA-B*08:40,YDSEYRNIFTNTDESNLYLSYNYYTWAVRAYEWY\nHLA-B*08:41,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:42,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:43,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:44,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:45,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:46,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:47,YDSEYRNIFTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B*08:48,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:49,YDSEYRNIFTNTDESNLYIRSNFYTWAVDAYTWY\nHLA-B*08:50,YYSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:51,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:52,YDSEYRNIFTNTDESIAYLSYNYYTWAVDAYTWY\nHLA-B*08:53,YDSEYRNIFTNTDESNLYLSYNYYTWAEDAYTWY\nHLA-B*08:54,YDSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:55,YDSEYRNIFTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B*08:56,YDAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:57,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:58,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:59,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:60,YDSEYRNIFTNTDESNLYISYNYYTWAVDAYTWY\nHLA-B*08:61,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:62,YHSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*13:01,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:02,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:03,YYTMYREISTNTYENTAYWTYNLYTWAVLAYLWY\nHLA-B*13:04,YYTMYREISTNTYENTAYWTYDSYTWAVLAYLWY\nHLA-B*13:06,YYTMYREISTNTYENTAYIRYNLYTWAVLAYTWY\nHLA-B*13:09,YYTMYREISTNTYESNLYWTYNLYTWAVLAYEWY\nHLA-B*13:10,YYTMYREISTNTYENTAYLRYDSYTWAVLAYEWY\nHLA-B*13:11,YYTMYREISTNTYENTAYLRYNLYTWAVLAYEWY\nHLA-B*13:12,YYTMYREISTNTYENTAYIRYNLYTWAVLAYGWY\nHLA-B*13:13,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B*13:14,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:15,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWH\nHLA-B*13:16,YYTMYREISTNTYENTAYWTYNLYTWAELAYEWY\nHLA-B*13:17,YYAMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:18,YYTMYREISTNTYENTAYWTYNLYTWAVRAYEWY\nHLA-B*13:19,YYTMYREVSTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:20,YYTMYREISTNTYENTAYIRYNLYTWAELAYEWY\nHLA-B*13:21,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B*13:22,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:23,YHTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:25,YYTMYREISTNTYESTAYIRYNLYTWAVLAYEWY\nHLA-B*13:26,YYTMYREISTNTYENTAYIRYDSYTWAVLAYEWY\nHLA-B*13:27,YYTMYREISTNTYENTAYWTFNLYTWAVLAYEWY\nHLA-B*13:28,YYTMYREISTNTYENTACIRYNLYTWAVLAYEWY\nHLA-B*13:29,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:30,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:31,YYTMYREISTNTYENTAYWTYNLYTWAEWAYEWY\nHLA-B*13:32,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:33,YYAMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:34,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:35,YYTMYREISTNTYENTAYWTYDYYTWAVLAYEWY\nHLA-B*13:36,YYTMYRNISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:37,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:38,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:39,YYTMYREISTNTYENNLYIRYNLYTWAVLAYEWY\nHLA-B*14:01,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:02,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:03,YYSEYRNICTNTDESNLYLWYNFYTWAERAYTWH\nHLA-B*14:04,HYSEYRNNCTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:05,YYSEYRNICTNTDESNLYLSYNFYTWAELAYTWH\nHLA-B*14:06,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B*14:08,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B*14:09,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:10,YYSEYRNICTNTDESNLYIRYNFYTWAELAYTWH\nHLA-B*14:11,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:12,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:13,YYSEYRNICTNTDESNLYLSYNYYTWAELAYTWH\nHLA-B*14:14,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B*14:15,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:16,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:17,YYSEYRNICTNTDESNL
YLWYNFYTWAELAYTWH\nHLA-B*14:18,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B*15:01,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:02,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:03,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:04,YYAMYREISTNTYESNLYWTYDSYTWAEWAYLWY\nHLA-B*15:05,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:06,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B*15:07,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*15:08,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:09,YYSEYRNICTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B*15:10,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B*15:11,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:12,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B*15:13,YYAMYRNISTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B*15:14,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLSY\nHLA-B*15:15,YYAMYRNISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:16,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:17,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:18,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:19,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B*15:20,YYAMYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*15:21,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:23,YYSEYRNICTNTYENIAYLRYDSYTWAELAYLWY\nHLA-B*15:24,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B*15:25,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:27,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B*15:28,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:29,YYSEYRNIFTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:30,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*15:31,YYAMYRNISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:32,YYAMYREISTNTYESNLYLRSDSYTWAEWAYLWY\nHLA-B*15:33,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:34,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:35,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B*15:36,YYAMYREISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B*15:37,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWH\nHLA-B*15:38,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWH\nHLA-B*15:39,YYAMYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:40,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B*15:42,YYAMYREISTNTYESNLYWTYNLYTWAELAYTWY\nHLA-B*15:43,YYAMYREISTNTYEDTLYLRYDSYTWAEWAYLWY\nHLA-B*15:44,YYAMYRNICTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B*15:45,YYAMYREISTNTYESNLYLSYDYYTWAEWAYLWY\nHLA-B*15:46,YYAKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:47,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*15:48,YYAMYREISTNTYESNLYLRYNYYTWAVLTYLWY\nHLA-B*15:49,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*15:50,YYAMYREISTNTYESNLYLRYDSYTWAEWAYTWY\nHLA-B*15:51,YYSEYRNICTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B*15:52,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*15:53,YYTKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:54,YYSEYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:55,YYAMYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*15:56,YYAMYREIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:57,YYAMYREISTNTYVNNLYLRYDSYTWAEWAYLWY\nHLA-B*15:58,YYAMYREISTNTYESNLYLRYNFYTWAEWAYLWY\nHLA-B*15:60,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:61,YYSEYREISTNTDESNLYLRYDSYTWAELAYLWY\nHLA-B*15:62,YYSEYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:63,YYAMYREISTNTYESNLYLRYDYYTWAEWAYLWY\nHLA-B*15:64,YYSEYRNISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:65,YYAMYREISTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B*15:66,YYAMYREICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:67,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:68,YYSEYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*15:69,YYSEYREISTNTYESNLYLRYDSYTWAELTYTWY\nHLA-B*15:70,YYAMYREISTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:71,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:72,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:73,YYAMYREISTNTYESNLYLRYNLYTWAEWAYLWY\nHLA-B*15:74,YYSEYREISINTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:75,YYAMYREISTNTYESNLYLRYDSYTWAQWAYLWY\nHLA-B*15:76,YYAMYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:77,YYAMYREISTNTYESNLYIR
YDDYTWAEWAYLWY\nHLA-B*15:78,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:80,YYSEYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:81,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:82,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:83,YYAMYREISTNTYESNLYWTYNYYTWAVDAYTWY\nHLA-B*15:84,YYAMYREISTNTYESNLYLRFDSYTWAVRAYLWY\nHLA-B*15:85,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:86,YYAMYREISTNTYESNLYLRYNLYTWAVLAYTWY\nHLA-B*15:87,YYAMYREISTNTYESIAYLRYDSYTWAEWAYLWY\nHLA-B*15:88,YYAMYRNISTNTYESNLYIRYDSYTWATLAYLWY\nHLA-B*15:89,YYAMYRNISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B*15:90,YYSEYRNICTNTYESNLYLRYDYYTWAELVYLWY\nHLA-B*15:91,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLSY\nHLA-B*15:92,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:93,YYSEYRNICTNTYESNLYLRYDSYTWAELAYTWY\nHLA-B*15:95,YYAMYQENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:96,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:97,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:98,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:99,YYSEYRNICTNTYESNLYLRYDYYTWAERAYLWY\nHLA-B*15:101,YYAMYREIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:102,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:103,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:104,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:105,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:106,YYAKYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:107,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:108,YYSEYRNICTNTYESNLYLRYDSYTWAELTYLWY\nHLA-B*15:109,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B*15:110,YYAMYREISTNTYESNLYLRCDSYTWAEWAYLWY\nHLA-B*15:112,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:113,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:114,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*15:115,YYSEYRNICTNTYESTAYLRYDSYTWAELAYLWY\nHLA-B*15:116,YYAMYREISTNTYESNLYLRYDSYSLAEWAYLWY\nHLA-B*15:117,YYAMYREISTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B*15:118,YYAMYREISTNTYESNLYLMYDSYTWAEWAYLWY\nHLA-B*15:119,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:120,YYAMYRDISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:121,YYAMYRNISTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B*15:122,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:123,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:124,YYSEYRNICTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*15:125,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:126,YYAMYREISTNTYESNLYLSYDSCTWAEWAYLWY\nHLA-B*15:127,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:128,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:129,YYAMYREISTNTYESNLYLNYDSYTWAEWAYLWY\nHLA-B*15:131,YYSEYREISTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B*15:132,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:133,YYSEYRNICTNTYESNLYLRYDFYTWAELAYLWY\nHLA-B*15:134,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:135,YYAMYREISTNTYENNLYLRYDSYTWAEWAYLWY\nHLA-B*15:136,YYAMYREISTNTYESNLYLRYDSYTWAVLTYLWY\nHLA-B*15:137,YYAMYREISTNTYESNLYWTYNFYTWAEWAYLWY\nHLA-B*15:138,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B*15:139,YYAMYRNISANTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:140,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:141,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B*15:142,YDAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:143,YYAKYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:144,YYAMYRNISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:145,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:146,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:147,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:148,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:150,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*15:151,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:152,YYAMYREIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:153,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:154,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:155,YYAMYREISTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B*15:156,YYSEYREISTNTYESNLY
LRYDSYTWAELAYLWY\nHLA-B*15:157,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B*15:158,YYSEYREISTNTYESNLFLRYDSYTWAELAYLWY\nHLA-B*15:159,YYAMYREISTNTYESNLHLRYDSYTWAEWAYLWY\nHLA-B*15:160,YYAMHREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:161,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B*15:162,YYAMYRENMASTYENIAYLRYHDYTWAALAYLWY\nHLA-B*15:163,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:164,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:165,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:166,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:167,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:168,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:169,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:170,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:171,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:172,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:173,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:174,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:175,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:176,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:177,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:178,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:179,YYAMYREISTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B*15:180,YDSEYRNIFTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:183,YYTMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:184,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:185,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B*15:186,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B*15:187,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:188,YYAMYREISTNTYESNLYLRYNYYTWAVLAYTWY\nHLA-B*15:189,YYAMYRNICTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:191,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:192,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:193,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:194,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:195,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B*15:196,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:197,YYSEYRNICTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B*15:198,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:199,YYAMYREISTNTYESNLYLRYDSYTWAEDAYTWY\nHLA-B*15:200,YYSEYRNICTNTYESNLYLRYDSYTWATLAYLWY\nHLA-B*15:201,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:202,YYATYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*18:01,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:02,YHSTYRNISTNTYESNLYLNYDSYTWAVLAYTWH\nHLA-B*18:03,YHSTYRNISTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:04,YYATYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:05,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:06,YHSTYRNISTNTYVSNLYLRYDSYTWAVLAYTWH\nHLA-B*18:07,YHSTYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:08,YHSTYRNISTNTYESNLYLRCDSYTWAVLAYTWH\nHLA-B*18:09,YHSTYRNISTNTYENTAYLRYDSYTWAVLAYTWH\nHLA-B*18:10,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*18:11,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWY\nHLA-B*18:12,YHSTYREISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:13,YHSTYRNISTNTYESNLYLRYDSYTWAVRAYTWH\nHLA-B*18:14,YHSTYRNISTNTYESNLYLSYDSYTWAVLAYTWH\nHLA-B*18:15,YHSTYRNISTNTYESNLYLRYDSYTWAELAYTWH\nHLA-B*18:18,YHSTYRNISTNTYESNLYLRSDSYTWAVLAYTWH\nHLA-B*18:19,YHSTYRNISTNTYESNLYLRYDSYTWAEWAYTWH\nHLA-B*18:20,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:21,YHSTYRNISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*18:22,YHSTYRNISTNTYESNLYISYDSYTWAVLAYTWH\nHLA-B*18:24,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:25,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:26,YHSTYRNISTNTYESNLYLRYNYYTWAVLAYTWH\nHLA-B*18:27,YHSTYRNISTNTYESNLYLMFDSYTWAVLAYTWH\nHLA-B*18:28,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:29,YHATYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:30,YHSTYRNISTNTYESNLYLRYDSYTWAERAYTWH\nHLA-B*18:31,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:32,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:33,YHSTYRNICTNTYESNLYLRYDSYTW
AVLAYTWH\nHLA-B*18:34,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:35,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*18:36,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B*18:37,YHSEYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:38,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:39,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:40,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:41,YHSTYRNISTNTYESNLYLRYESYTWAVLAYTWH\nHLA-B*18:42,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:43,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:44,YHSTYRNISTNTYESNLYLWYDSYTWAVLAYTWH\nHLA-B*18:45,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:46,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:47,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:48,YHSKYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:49,YHSTYRNISTNTYENNLYLRYDSYTWAVLAYTWH\nHLA-B*18:50,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWH\nHLA-B*27:01,YHTEYREICAKTYENTAYLNYHDYTWAVLAYEWY\nHLA-B*27:02,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B*27:03,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:04,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*27:05,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:06,YHTEYREICAKTDESTLYLNYDYYTWAELAYEWY\nHLA-B*27:07,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B*27:08,YHTEYREICAKTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:09,YHTEYREICAKTDEDTLYLNYHHYTWAVLAYEWY\nHLA-B*27:10,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B*27:11,YHTEYREICAKTDESTLYLSYNYYTWAVLAYEWY\nHLA-B*27:12,YHTEYREICTNTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:13,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:14,YHTEYREICAKTDEDTLYWTYHDYTWAVLAYEWY\nHLA-B*27:15,YHTEYREICAKTDESTLYLNYHDYTWAELAYTWY\nHLA-B*27:16,YHTEYREICTNTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:17,YHTEFREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:18,YHTEYREISTNTYESNLYLNYHDYTWAELAYEWY\nHLA-B*27:19,YHTEYREICAKTDEDTLYIRYHDYTWAVLAYEWY\nHLA-B*27:20,YHTEYREICAKTDESTLYLNYNYYTWAELAYEWY\nHLA-B*27:21,YHTEYREICAKTDESTLYLRYDYYTWAELAYEWY\nHLA-B*27:23,YHTEYRNIFTNTYESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:24,YHTEYREICAKTDESTLYLSYNYYSWAELAYEWY\nHLA-B*27:25,YHTEYREICAKTDESTLYLNYHDYTWAEWAYLWY\nHLA-B*27:26,YHTEYREICAQTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:27,YHTEYREICAKTDEDTLYLNYNYYTWAVLAYEWY\nHLA-B*27:28,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYTWH\nHLA-B*27:29,YHTEYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:30,YHTEYREICAKTDENIAYIRYHDYTWAVLAYEWY\nHLA-B*27:31,YHTEYREICAQTDESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:32,YHTEYREICAKTDEDTLYLSYHDYTWAVLAYEWY\nHLA-B*27:33,YHTEYREICAKTDESNLYLSYNYYTWAVLAYEWY\nHLA-B*27:34,YHTEYREICAKTDEDTLYLSYDYYTWAVLAYEWY\nHLA-B*27:35,YHTEYREICAKTDEDTLYLNYNFYTWAVLAYEWY\nHLA-B*27:36,YHTEYREICAKTDESTLYLNYHDYSLAVLAYEWY\nHLA-B*27:37,YYTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:38,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYLWY\nHLA-B*27:39,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:40,YHTEYREICAKTDESNLYLNYHDYTWAELAYEWY\nHLA-B*27:41,YHTEYREICAKTDEDTLYLNYDSYTWAVLAYEWY\nHLA-B*27:42,YHTEYREICAKTDEDNLYLNYHDYTWAVLAYEWY\nHLA-B*27:43,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B*27:44,YHTEYREICAKTYESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:45,YHTEYREICAKTDEDTLYLNYHDYTWAVRAYEWY\nHLA-B*27:46,YHTEYREICAKTDEDTLYLNYHYYTWAVLAYEWY\nHLA-B*27:47,YHTEYREICAKTDEDTLYLNYHDYTWAVDAYLSY\nHLA-B*27:48,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:49,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:50,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYESY\nHLA-B*27:51,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:52,YHTTYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:53,YHTEYREICAKTDEDIAYLNYHDYTWAVLAYEWY\nHLA-B*27:54,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B*27:55,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:56,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:57,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B*27:58,YHTEYREICAKTDEDTLYLNYHDYTWAVL
AYEWY\nHLA-B*27:60,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:61,YHTEYREICAKTDESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:62,YHTEYREICAKTDENIAYLNYHDYTWAVLAYTWH\nHLA-B*27:63,YHTEYREICAKTDESTLYLNYHDYTWAELAYLWY\nHLA-B*27:67,YHTMYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:68,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*27:69,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*35:01,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:02,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:03,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:04,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:05,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:06,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B*35:07,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:08,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:09,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:10,YYATYREIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:11,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*35:12,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:13,YYATYREIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:14,YYATYRNIFTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*35:15,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYEWY\nHLA-B*35:16,YYATYREIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:17,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:18,YYATYRNIFTNTYESNLYIRYNYYTWAVRAYLWY\nHLA-B*35:19,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:20,YYATYRNISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:21,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWH\nHLA-B*35:22,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYLWY\nHLA-B*35:23,YYATYRNIFTNTYESNLYIRFDSYTWAVLAYLWY\nHLA-B*35:24,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWH\nHLA-B*35:25,YYSEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:26,YYAEYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:27,YYATYRNIFTNTYENNLYIRYDSYTWAVLAYLWY\nHLA-B*35:28,YYATYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:29,YYATYRNIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:30,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:31,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*35:32,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*35:33,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYEWY\nHLA-B*35:34,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B*35:35,YYATYRNIFTNTYESNLYIRYDSYTWAVLTYTWY\nHLA-B*35:36,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:37,YYATYRNIFTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B*35:38,YYATYRNIFTNTYESNLYIRYDFYTWAVDAYLWY\nHLA-B*35:39,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B*35:41,YYATYRNIFTNTYESNLYIRYDSCTWAVLAYLWY\nHLA-B*35:42,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:43,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:44,YYATYRNIFTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*35:45,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLSY\nHLA-B*35:46,YYAMYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:47,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:48,YYATYRNIFTNTYESNLYIRSDSYTWAVLAYLWY\nHLA-B*35:49,YYAEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:50,YHATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:51,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:52,YYATYRNIFTNTYECNLYIRYDSYTWAVLAYLWY\nHLA-B*35:54,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:55,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:56,YYATYRNIFTNTYENNLYIRYDFYTWAVLAYLWY\nHLA-B*35:57,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:58,YYATYRNIFTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B*35:59,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B*35:60,YYATYRNIFTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*35:61,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:62,YYATYRNIFTNTYESNLYIRYDSYTWAVWAYLWY\nHLA-B*35:63,YHTKYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:64,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:66,YYATYRNIFTNTYESNLYLSYDSYTWAVRAYEWY\nHLA-B*35:67,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:68,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*35:69,YYATYREIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:70,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYL
WY\nHLA-B*35:71,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLSY\nHLA-B*35:72,YYATYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:74,YYATYRNIFTNTYVSNLYIRYDFYTWAVLAYLWY\nHLA-B*35:75,YYATYRNIFTNTYESNLYLRYDFYTWAVLAYLWY\nHLA-B*35:76,YYATYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:77,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:78,YYATYRNIFANTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:79,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:80,YYATYREIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:81,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWH\nHLA-B*35:82,YYATYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:83,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:84,YHTTYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:85,YYATYRNICTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:86,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B*35:87,YYATYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*35:88,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*35:89,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:90,YYTTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:91,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:92,YYATYRNIFTNAYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:93,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYTWY\nHLA-B*35:94,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:95,YYATYRNISTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:96,YYATYRNIFTNTYESNLYIRYDFYTWAELAYTWH\nHLA-B*35:97,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:98,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:99,YYATYRNIFTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B*35:100,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:101,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:102,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:103,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:104,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:105,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:106,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:107,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:108,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:109,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWH\nHLA-B*35:110,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:111,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:112,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:113,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:114,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:115,YYATYRNIFTNTYESNLYIRYDSYTWAVDAYLWY\nHLA-B*35:116,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:117,YYATYRNIFTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*35:118,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:119,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:120,YYATYRNIFTNTYESNLYIRHDSYTWAVLAYLWY\nHLA-B*35:121,YYATYRNIFTNTYESNLYIRYHSYTWAVLAYLWY\nHLA-B*35:122,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:123,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:124,YYSTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:125,YYSTYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:126,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:127,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:128,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:131,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:132,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:133,YYATYRNIFTNTYESNLYIRYVSYTWAVLAYLWY\nHLA-B*35:135,YYATYRNICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:136,YYATYRNIFTNTDESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:137,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:138,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:139,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:140,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:141,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:142,YYSTYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:143,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:144,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*37:01,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:02,YHSTYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*37:04,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWH\nHLA-B
*37:05,YHSTYREISTNTYEDNLYIRSNFYTWAVDAYTWY\nHLA-B*37:06,YHSKYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:07,YHSTYREISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B*37:08,YHSTYRNISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:09,YHSTYREISTNTYEDTLYLSYDYYTWAVDAYTWY\nHLA-B*37:10,YHSTYREISTNTYENTAYIRSNFYTWAVDAYTWY\nHLA-B*37:11,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B*37:12,YHSTYREISTNTYEDTLYIRYNYYTWAVDAYTWY\nHLA-B*37:13,YHSTYREISTNTYEDTLYIRSNFYTWAEDAYTWY\nHLA-B*37:14,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B*37:15,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:17,YHSTYREISTNTYEDTLYIRSNFYTWTVDAYTWY\nHLA-B*37:18,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:19,YHSTYREISTNTYEDTLYIRYNFYTWAVDAYTWY\nHLA-B*37:20,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:21,YHSTYREIFTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:22,YHATYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:23,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*38:01,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:02,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:03,YYSEYREISTNTDESTAYLRYNFYTWAVLTYTWY\nHLA-B*38:04,YYSEYREICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:05,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:06,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:07,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:08,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYMWY\nHLA-B*38:09,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:10,YYSEYRNICTNTYENIAYLRYNFYTWAELAYTWY\nHLA-B*38:11,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:12,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:13,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:14,YYSEYRNICTNTDENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:15,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:16,YYTEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:17,YYSEYRNICTNTYEDTLYLRYNFYTWAVLTYTWY\nHLA-B*38:18,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:19,YYSEYRNICTNTYENIAYLSYNFYTWAVLTYTWY\nHLA-B*38:20,YYSEYRNICTNTYENIAYIRYNFYTWAVLTYTWY\nHLA-B*38:21,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:22,YYSEYRNICTNTYENIAYLNYNFYTWAVLTYKWY\nHLA-B*38:23,YYSEYRNICTNTYENTAYFRYNFYTWAVLTYTWY\nHLA-B*39:01,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:02,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:03,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:04,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:05,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:06,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B*39:07,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B*39:08,YYSEYREISTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B*39:09,YYSEYRNICTNTDESNLYLRSNFYTWAVLTYTWY\nHLA-B*39:10,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:11,YYSEYRNICTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B*39:12,YDSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:13,YYSEYREISTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:14,YYSEYRNICTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B*39:15,YYSEYRNICTNTDESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:16,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:17,YYSEYRNIYTNTDESNLYLRYNLYTWAVLTYTWY\nHLA-B*39:18,YYSEYRNICTNTDESNLYLRYNFYTWAEWTYTWY\nHLA-B*39:19,YYSTYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:20,YYSEYRNIYTNTYENNLYLRYNFYTWAVLTYTWY\nHLA-B*39:22,YYSEYREICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:23,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:24,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:26,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:27,YYSEYRNICTNTDVSNLYLRYNFYTWAVLTYTWY\nHLA-B*39:28,YYSEYRNICTNTDESNLYLTYNFYTWAVLTYTWY\nHLA-B*39:29,YYSEYRNICTNTDESNLYLSYDYYTWAVLTYTWY\nHLA-B*39:30,YYSEYRNICTNTDESNLYLRYNFYTWAVLAYTWY\nHLA-B*39:31,YHSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:32,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYLWH\nHLA-B*39:33,YYSEYRNICTNTDESNLYWTYNFYTWAVRAYLWY\nHLA-B*39:34,YYSEYRNICTNTDESNLYWTYNFYTWAVLAYTWY\nHLA-B*39:35,YYSEYRNICTNTDESNLYLRYNFYTWAELTYTWY\nHLA-B*39
:36,YYSEYRNICTNTDESNLYLRYNFYTWAEWAYTWY\nHLA-B*39:37,YYSEYRNICTNTYESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:39,YYSEYRNISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:41,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:42,YYSEYRNICTNTDESNLYIRYNFYTWAVLTYTWY\nHLA-B*39:43,YYSEYRNICTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B*39:44,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:45,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:46,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:47,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYEWY\nHLA-B*39:48,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWH\nHLA-B*39:49,YYSEYREISTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:50,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYEWY\nHLA-B*39:51,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:52,YYSEYRNICTDTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:53,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:54,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:55,YYSEYRNICTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:56,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:57,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B*39:58,YYSEYRNIFTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:59,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:60,YYSEYRNICTNTDESNLYLRYNFYTWAALTYTWY\nHLA-B*40:01,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:02,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:03,YHTKYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B*40:04,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:05,YHTKYREISTNTYESNLYLSYNYYTWAELAYLWY\nHLA-B*40:06,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:07,YHTKYREIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:08,YHTKYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:09,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:10,YYAKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:11,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:12,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:13,YHTKYREIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*40:14,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:15,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*40:16,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*40:18,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:19,YHTKYREISTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*40:20,YHTKYREISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*40:21,YYAMYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:23,YHTKYREISTNTYESNLYLRYNYYSWAERAYEWY\nHLA-B*40:24,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:25,YHTKYRNISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:26,YHTKYREISTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B*40:27,YHTKYREISTNTYESNLYLSYNNYTWAVLAYEWY\nHLA-B*40:28,YHTKYREISTNTYESNLYIRYNYYTWAELAYLWH\nHLA-B*40:29,YHTKYPEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:30,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B*40:31,YHTKYREISTNTYESNLYLSYDYYSLAVLAYEWY\nHLA-B*40:32,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*40:33,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B*40:34,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B*40:35,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:36,YHTKYREISTNTYESNLYLRYNFYSLAVLAYEWY\nHLA-B*40:37,YHTKYREISTNTYENNLYLSYNYYTWAVLAYEWY\nHLA-B*40:38,YHTKYREISTNTYESNLYLRYNSYSLAVLAYEWY\nHLA-B*40:39,YHTKYREISTNTYESNLYLSYNYYTWAVLAYTWY\nHLA-B*40:40,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:42,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B*40:43,YHTKYREISTNTDESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:44,YHTKYREISTNTYESNLYWTYDYYTWAVLAYEWY\nHLA-B*40:45,YHTKYREISTNTYESNLYLSYNYYSWAVLAYEWY\nHLA-B*40:46,YHTEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:47,YHTKYREISTNTYENTAYLRYNYYSLAVLAYEWY\nHLA-B*40:48,YHTKYREISTNTYESNLYLRYNLYSLAVLAYEWY\nHLA-B*40:49,YYTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:50,YHTKYREISTNTDESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:51,YHTKYREISTNTYESNLYLRYNYYSWAELAYTWH\nHLA-B*40:52,YHTKYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*40:53,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:54,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:55
,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:56,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:57,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:58,YYAKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:59,YHTKYREISTNTYESNLYIRYDSYSLAVLAYEWY\nHLA-B*40:60,YHTKYREISTNTYESNLYLRSDSYSLAVLAYEWY\nHLA-B*40:61,YHTKYREIYTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:62,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:63,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B*40:64,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:65,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:66,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:67,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:68,YHTKYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:69,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:70,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:71,YHTKYREISTNTYESNLYLSYNLYTWAVLAYLWY\nHLA-B*40:72,YHTKYREISTNTYESNLYLRYNYYTLAVLAYEWY\nHLA-B*40:73,YHTKYREISTNTYVSNLYLRYNYYSLAVLAYEWY\nHLA-B*40:74,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:75,YHTKYREICTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:76,YHTKYREISTNTYESKLYLRYNYYSLAVLAYEWY\nHLA-B*40:77,YHTKYREISTNTYESNLYLRYNFYTLAVLAYEWY\nHLA-B*40:78,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:79,YHTKYREISTNTYESNLHLRYNYYSLAVLAYEWY\nHLA-B*40:80,YHTKYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*40:81,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:82,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:83,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:84,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:85,YHTKYREISTNTYESNLYLSYNYYIWAVLAYEWY\nHLA-B*40:86,YHTKYREISTNTYESNLYWTYNFYTWAVLAYEWY\nHLA-B*40:87,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:88,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:89,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:90,YHTKYREISTNTYESNLYLSYNYYTWAVLAHEWY\nHLA-B*40:91,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:92,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B*40:93,YHTEYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:94,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:95,YHTKYREISTNTYESNLYWTYNYYTWAELAYEWY\nHLA-B*40:96,YHTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B*40:97,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:98,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*40:99,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:100,YHTKYREISTNTYESNLYLRFNYYSLAVLAYEWY\nHLA-B*40:101,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:102,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:103,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:104,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:105,YHTKYREISTNTYESNLYLSYNSYTWAVLAYEWY\nHLA-B*40:106,YHTKYRNIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:107,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:108,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:109,YHTKYREISTNTYESIAYWTYNYYTWAVLAYEWY\nHLA-B*40:110,YYTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B*40:111,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:112,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:113,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWH\nHLA-B*40:114,YHTKYREISTNTYESNLYLRYNYYSWAVLAYEWY\nHLA-B*40:115,YHTKYWEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:116,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:117,YHTKYREISTNTYENIAYLRYNYYSLAVLAYEWY\nHLA-B*40:119,YHTKYREISTNTYDSNLYLSYNYYTWAVLAYEWY\nHLA-B*40:120,YHTKYREISTNTYESNLYIRYDYYTWAVLAYEWY\nHLA-B*40:121,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:122,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:123,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:124,YHTKYREISTNTYESNLYLRYHDYSLAVLAYEWY\nHLA-B*40:125,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:126,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:127,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:128,YHTKYREISTNTYESNLYLRYNYYSLAVRAYEWY\nHLA-B*40:129,YYTKYREISTNTYESNLYIRY
NYYTWAVLAYEWY\nHLA-B*40:130,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:131,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:132,YHTKYREISTNTYESNLYLRYNYYSLAVLAYESY\nHLA-B*40:134,YHTKYREISTNIYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:135,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:136,YHTKYREISTNTYESNLYLRYNYYTWAVDAYEWY\nHLA-B*40:137,YYAMYREISTNTYESNLYIRYNYYSLAVLAYEWY\nHLA-B*40:138,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:139,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:140,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:141,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:143,YHTKYREISTNTYESNLYLSFNYYTWAVLAYEWY\nHLA-B*40:145,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:146,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:147,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*41:01,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:02,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:03,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B*41:04,YHTKYREISTNTYESNLYLSYDYYTWAVDAYTWY\nHLA-B*41:05,YHTKYREISTNTYESKLYWRYNYYTWAVDAYTWY\nHLA-B*41:06,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:07,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:08,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B*41:09,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:10,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:11,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:12,YHAKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*42:01,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:02,YHSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:04,YYSEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*42:05,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:06,YYSEYRNIYAQTDESNLYLSYNFYTWAVDAYTWY\nHLA-B*42:07,YYSEYRNIYTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:08,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:09,YHSEYRNIYAQTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*42:10,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:11,YYSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:12,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:13,YYSEYRNIYAQTDESNLYIRYNYYTWAVDAYTWY\nHLA-B*42:14,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*44:02,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:03,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:04,YYTKYREISTNTYENTAYIRYDDYTWAVRAYTSY\nHLA-B*44:05,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:06,YYATYRNIFTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B*44:07,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:08,YYTMYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:09,YYTKYREISTNTYESNLYIRYDDYTWAVDAYLSY\nHLA-B*44:10,YYTKYREISTNTYENTAYIRFNLYTWAVLAYLSY\nHLA-B*44:11,YYTKYREISTNTYENTPYIRYDDYTWAVDAYLSY\nHLA-B*44:12,YYTKYRNISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:13,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:14,YYTKYREISTNTYENTAYIRYNDYTWAVDAYLSY\nHLA-B*44:15,YHTKYREISTNTYESTAYWRYNLYTWAVDAYLSY\nHLA-B*44:16,YYTKYREISTNTYENTAYIRYDDYTWAVDAYEWY\nHLA-B*44:17,YYTKYREISTNTYENTAYIRYDSYTWAVDAYLSY\nHLA-B*44:18,YHTKYREISTNTYENIAYWRYNLYTWAVDAYLSY\nHLA-B*44:20,YYTKYREISTNTYENTAYWTYDDYTWAVDAYLSY\nHLA-B*44:21,YYTKYREISTNTYENTAYIRYDDYTWAVDAYESY\nHLA-B*44:22,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:24,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:25,YYTKYREISTNTYENIAYIRYDYYTWAVDAYLSY\nHLA-B*44:26,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:27,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:28,YYTKYREISTNTYENTAYIRYDDYTWAVRAYLSY\nHLA-B*44:29,YYTKYREISTNTYENTAYIRYDDYTWAVLTYLSY\nHLA-B*44:30,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:31,YYTKYREISTNTYENTAYLRYNYYSLAVLAYESY\nHLA-B*44:32,YYTKYPEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:33,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:34,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:35,YYTKYREISTNTYENTAYIRYDDYTWAVEAYLSY\nHLA-B*44:36,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:37,YYTKYREIS
TNTYENTAYIRYDDYTWAVLAYLWY\nHLA-B*44:38,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:39,YYPKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:40,YYTEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:41,YYTKYREISTNTYENTAYLRYDDYTWAVDAYLSY\nHLA-B*44:42,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLWY\nHLA-B*44:43,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLSY\nHLA-B*44:44,YYTEYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:45,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:46,YYTKYREISTNTYESNLYIRYDDYTWAVLAYLSY\nHLA-B*44:47,YYTKYREISTNTYENTAYWTYDDYTWAVLAYLSY\nHLA-B*44:48,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:49,YDTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:50,YYTKYREISTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B*44:51,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:53,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:54,YYTKYREISTNTYENTAYLSYDDYTWAVLAYLSY\nHLA-B*44:55,YHTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:57,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:59,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:60,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:62,YYTKYREISTNTYENTAYIRYNYYTWAVDAYLSY\nHLA-B*44:63,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:64,YYTKYREISTNTYENTAYIRYDDYTWAVLAYEWY\nHLA-B*44:65,YYTKYREISTNTYENTAYLRYDDYTWAVLAYLSY\nHLA-B*44:66,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:67,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:68,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:69,YYTKYWEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:70,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:71,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:72,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:73,YYTKYREISTNTYENTAYIRYDDYTWAVDGYLSY\nHLA-B*44:74,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:75,YYTKYREISTNTYENNLYIRYDYYTWAVDAYLSY\nHLA-B*44:76,YYTKYREISTNTYENTAYIRYDDYTWAERAYLSY\nHLA-B*44:77,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B*44:78,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:79,YYTKYREISTNTYENTAYIRYDDYTWAELAYLSY\nHLA-B*44:80,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:81,YYTNYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:82,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B*44:83,YYATYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:84,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:85,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:86,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:87,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:88,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:89,YYTKYREISTNTYENTAYIRYDDYTWAVDTYLSY\nHLA-B*44:90,YYTKYREIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-B*44:91,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLWY\nHLA-B*44:92,YYTKYREISTNTYENTAYIRYDDYTWAMLAYLSY\nHLA-B*44:93,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:94,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:95,YYTKYREISTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B*44:96,YYTKYREISTNTYENTAYIGYDDYTWAVLAYLSY\nHLA-B*44:97,YYTKYREICAKTDENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:98,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:99,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:100,YYTKYREISTNTYENTAYWRYDDYTWAVDAYLSY\nHLA-B*44:101,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:102,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:103,YHTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:104,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:105,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:106,YYTKYREISTNTYENTAYLSYDDYTWAVDAYLSY\nHLA-B*44:107,YYTKYREISTNTYENTAYIRYDYYTWAVLAYLSY\nHLA-B*44:109,YYTKYREISTNTYESTAYIRYDDYTWAVLAYLSY\nHLA-B*44:110,YYTKYREISTNTYENTAYISYDDYTWAVLAYLSY\nHLA-B*45:01,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:02,YHTKYREISTNTYESNLYWRYNFYTWAVDAYLSY\nHLA-B*45:03,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:04,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLWY\nHLA-B*45:05,YHTKYREISTNTYESNLYWRYNLYTWAVDVYLSY\nHLA-B*45:06,YH
TKYREIYAQTDESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:07,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:08,YHTKYREISTNTYESNLYWRYNLYTWAVDAYTWY\nHLA-B*45:09,YHTKYREISTNTYESNLYWRYDSYTWAVDAYLSY\nHLA-B*45:10,YHTKYREISTNTYESNLYWRYNLYTWAVDAYEWY\nHLA-B*45:11,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:12,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*46:01,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:02,YYAMYREKYRQTGVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:03,YYAMYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-B*46:04,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:05,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:06,YYAMYREKYRQTDVSNLYLRYDSYSLAVLAYEWY\nHLA-B*46:08,YYAMYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-B*46:09,YYAMYREKYRQTDVSNLYLRYDSYTWAVWAYLWY\nHLA-B*46:10,YYTMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:11,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYLWY\nHLA-B*46:12,YYAMYREKYRQTDVSNLYLSYDSYTWAEWAYLWY\nHLA-B*46:13,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-B*46:14,YHAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:16,YYAMYREKFRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:17,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLSY\nHLA-B*46:18,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYTWY\nHLA-B*46:19,YYAMYREKYRQTDVSNLYLRYDSYTWAVLTYLWY\nHLA-B*46:20,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:21,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYTWY\nHLA-B*46:22,YYAMYREKYRRTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:23,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:24,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*47:01,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*47:02,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B*47:03,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B*47:04,YYTKYREISTNTYENTAYLNYHDYTWAVLAYEWY\nHLA-B*47:05,YYTKYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*47:06,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*47:07,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*48:01,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:02,YYSEYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*48:03,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*48:04,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:05,YYSEYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*48:06,YYSEYRNIFTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:07,YYSEYREISTNTYESNLYLSYNFYSLAVLAYEWY\nHLA-B*48:08,YYSEYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*48:09,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:10,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:11,YYSEYREISTNTYESNLYLSYNYYSLAVLAYELY\nHLA-B*48:12,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:13,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:14,YYSEYREISTNTYESNLYLSYNSYTLAVLAYEWY\nHLA-B*48:15,YYSEYREISTNTYESNLYLSYNYYSLAELAYEWY\nHLA-B*48:16,YYSEYRVISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:17,YYSEYREISTNTYESNLYIRYNFYSLAVLAYEWY\nHLA-B*48:18,YYSEYREISTNTYESIAYLSYNYYSLAVLAYEWY\nHLA-B*48:19,YYSEYREISTNTYESNLYLSYNYYSLAVWAYEWY\nHLA-B*48:20,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:21,YYSEYREISTNTYESNLYLNYNYYSLAVLAYEWY\nHLA-B*48:22,YHSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:23,YYSEYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*49:01,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:02,YHTKYREISTNTYENTAYWRYNLYTWAELAYLWY\nHLA-B*49:03,YHATYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:04,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B*49:05,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B*49:06,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:07,YHTKYREISTNTYENIAYWRYNLYTWAELAYEWY\nHLA-B*49:08,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:09,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWH\nHLA-B*49:10,YYTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*50:01,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:02,YHTKYREISTNTYESNLYWRYNLYTWAELAYLSY\nHLA-B*50:04,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:05,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:06,YHTRYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:07,YHTKY
REISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:08,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:09,YHTKYREISTNTYESNLYWRYNFYTWAELAYLWY\nHLA-B*51:01,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:02,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B*51:03,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLGH\nHLA-B*51:04,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B*51:05,YYATYRNIFTNTYENIAYWTYNYYTWAVRAYLWY\nHLA-B*51:06,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWH\nHLA-B*51:07,YYATYRNISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:08,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B*51:09,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*51:12,YYATYRNIFTNTYENIADWTYNYYTWAELAYLWH\nHLA-B*51:13,YYATYRNIFTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B*51:14,YYATYRNIFTNTYENIAYWTYKYYTWAELAYLWH\nHLA-B*51:15,YYATYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B*51:16,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B*51:17,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:18,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:19,YYATYRNIFTNTYENIAYWTYNYYTWAVLTYLWH\nHLA-B*51:20,YYATYRNIFTNTDENIAYWTYNYYTWAVDAYLWH\nHLA-B*51:21,YYATYRNIFTNTYENIAYWTYNYYTWAELAYTWH\nHLA-B*51:22,YYATYRNICTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:23,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLSY\nHLA-B*51:24,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:26,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:28,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:29,YYATYRNIFTNTYENIAYWTYNYYTWAERAYLWH\nHLA-B*51:30,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:31,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWH\nHLA-B*51:32,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:33,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:34,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWY\nHLA-B*51:35,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:36,YYATYRNIFTNTYENIAYWTYNYYTWAEDAYTWY\nHLA-B*51:37,YYATYRNIFTNTYENIAYWTYDSYTWAELAYLWH\nHLA-B*51:38,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:39,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:40,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B*51:42,YYATYRNIFTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B*51:43,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:45,YYATYRNIFTNTYENIAYLRYDSYTWAELAYLWH\nHLA-B*51:46,YYATYRNIFTNTYENIAYITYNYYTWAELAYLWH\nHLA-B*51:48,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:49,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:50,YYATYRNIFTNTYENGLYWTYNYYTWAELAYLWH\nHLA-B*51:51,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:52,YYATYRNIFTNTHENIAYWTYNYYTWAELAYLWH\nHLA-B*51:53,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:54,YYATYRNIFTNTYENTAYWTYNYYTWAVRAYLWY\nHLA-B*51:55,YYATYRNIFTNTYENIAYWTYNYYTWAEQAYLWH\nHLA-B*51:56,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B*51:57,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:58,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:59,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWY\nHLA-B*51:60,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:61,YYATYRNIFTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B*51:62,YYATYRNIFTNTYENIAYLRYNLYTWAELAYLWH\nHLA-B*51:63,YYATYRNIFTNTYENIAYWTYNSYTWAELAYLWH\nHLA-B*51:64,YYATYRNIFTNTYENIAYLSYNYYTWAELAYLWH\nHLA-B*51:65,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:66,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:67,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:68,YDATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:69,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:70,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:71,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:72,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:73,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*51:74,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:75,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:76,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:77,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:78,YYATYRNIFTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B*51:79,YYATYRNI
FTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:80,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:81,YYATYRNIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*51:82,YYATYRNIFTNTYENIAYWTYNYYTWAERAYEWH\nHLA-B*51:83,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:84,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:85,YYATYRNIFTNTYENIAYWTYHDYTWAELAYLWH\nHLA-B*51:86,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:87,YYATYRNIFTNTYENIAYWTYNYYTWADLAYLWH\nHLA-B*51:88,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:89,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:90,YYATYRNIFTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B*51:91,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:92,YYATYRNIFTNTYENIAYWTYDFYTWAELAYLWH\nHLA-B*51:93,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWY\nHLA-B*51:94,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:95,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:96,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:01,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:02,YYAMYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:03,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B*52:04,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:05,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:06,YYATYREIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:07,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:08,YYATYREISTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B*52:09,YYATYREISTNTYESIAYWTYNYYTWAELAYLWH\nHLA-B*52:10,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*52:11,YYATYREISTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B*52:12,YYATYREISTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B*52:13,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:14,YYATYREISTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B*52:15,YYATYREISTNTYENIAYWTYNYYTWAELAYLSH\nHLA-B*52:16,YYSEYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:17,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:18,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:19,YYATYREISTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B*52:20,YYATYREISTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B*52:21,YYATYREISTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B*53:01,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:02,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWH\nHLA-B*53:03,YYATYRNIFTNTYEDTLYIRYDSYTWAVLAYLWY\nHLA-B*53:04,YYATYRNIFTNTYENIAYIRYDFYTWAVLAYLWY\nHLA-B*53:05,YYATYRNIFTNTYESIAYIRYDSYTWAVLAYLWY\nHLA-B*53:06,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWH\nHLA-B*53:07,YYATYRNIFTNTYENIAYIRSNFYTWAVLAYLWY\nHLA-B*53:08,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B*53:09,YYATYRNISTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B*53:10,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:11,YYATYRNIFTNTYENTAYIRYDSYTWAVRAYLWY\nHLA-B*53:12,YYATYRNIFTNTYESTAYIRYDSYTWAVLAYLWY\nHLA-B*53:13,YYATYRNIFTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B*53:14,YYATYRNIFTNTYENIAYLSYDSYTWAVLAYLWY\nHLA-B*53:15,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:16,YYATYRNIFTNTYESIAYIRYDSYTWAVRAYLWY\nHLA-B*53:17,YYATYREISTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:18,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:19,YYATYRNIFTNTYENIAYIRYNYYTWAVLAYLWY\nHLA-B*53:20,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:21,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:22,YYATYRNIFTNTYENIAYIRYDSYTWAVDAYLSY\nHLA-B*53:23,YYATYRNIFTNTDENIAYIRYDSYTWAVLAYLWY\nHLA-B*54:01,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:02,YHAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:03,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*54:04,YYAGYRNIYAQTDESNLYWTYNYYTWAVLAYTWY\nHLA-B*54:06,YYAGYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*54:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:09,YYAGYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B*54:10,YYAGYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B*54:11,YYAGYRNIYAQTDESNLYWTYNYYSWAVLAYTWY\nHLA-B*54:12,YYAGYRNIYAQTDENIAYWTYNLYTWAVLAYTWY\nHLA-B*54:13,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:14,YYAGYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B*54:15,YYAGYRNIYAQ
TDESNLYWTYDYYTWAVLAYTWY\nHLA-B*54:16,YYAGYRNIYAQTDESNLYWTYDLYTWAVLAYTWY\nHLA-B*54:17,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:18,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:19,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:20,YYAGYRNIYAQTDESNLYWTYNLYTWAERAYTWY\nHLA-B*54:21,YYSGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:22,YYAGYRNIYAQTDESNLYWTYNLYSWAVLAYTWY\nHLA-B*54:23,YYAGYRNIYAQTEESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:01,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:02,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:03,YYAEYRNIYAQTDVSNLYWTYNLYTWAELAYTWY\nHLA-B*55:04,YYAEYRNIYAQTDESNLYLSYNYYTWAVLAYTWY\nHLA-B*55:05,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:08,YYAEYRNIYAQTDESNLYLRYNYYTWAVLAYLWY\nHLA-B*55:09,YYAEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B*55:10,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:11,YYAEYRNIYAQTDESNLYWMYNLYTWAELAYTWY\nHLA-B*55:12,YYAEYRNIYAQTDENNLYWTYNLYTWAVLAYTWY\nHLA-B*55:13,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYTWY\nHLA-B*55:14,YYAEYRNIYAQTDESNLYIVYDSYTWAELAYTWY\nHLA-B*55:15,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:16,YHAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:17,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B*55:18,YYAEYREISTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:19,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:20,YYAEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*55:21,YYAEYRNIYAQTDESNLYWTYNLYTWAEWAYTWY\nHLA-B*55:22,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYEWY\nHLA-B*55:23,YYAEYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B*55:24,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYEWY\nHLA-B*55:25,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:27,YYAEYRNIYAQTDESNLYWTYDYYTWAVLAYTWY\nHLA-B*55:28,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B*55:29,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:30,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:31,YYAEYRNIYAQTYESNLYWTYNLYTWAELAYTWY\nHLA-B*55:32,YYAEYRNIYAQTDESNLYWTYNSYTWAVLAYTWY\nHLA-B*55:33,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:34,YYAEYREISAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:35,YYAMYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:36,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:37,YYAEYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B*55:38,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:39,YYAEYRNIYAQTDESNLYWTYNLHTWAVLAYTWY\nHLA-B*55:40,YYAEYREIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:41,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:42,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:43,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*56:01,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:02,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B*56:03,YYAEYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*56:04,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B*56:05,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B*56:06,YYATYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B*56:07,YYAEYRNIYAQTDENTAYWTYNLYTWAVLAYLWY\nHLA-B*56:08,YYAEYREKYGQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:09,YYAEYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*56:10,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYTWY\nHLA-B*56:11,YYAEYRNIYAQTDESNLYIRYDFYTWAVLAYLWY\nHLA-B*56:12,YYAEYRNIYAQTDESNLYIRYNYYTWAVLAYTWY\nHLA-B*56:13,YYAEYRNIYAQTDESNLYWTYNLYTWAVDAYLWY\nHLA-B*56:14,YYAEYREKYRQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:15,YYAEYRNIYAQTDESNLYWTYNYYTWAVLAYLWY\nHLA-B*56:16,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:17,YYAEYRNIYANTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:18,YYAEYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B*56:20,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:21,YYAEYRNIYAQTDENIAYWTYNYYTWAELAYLWH\nHLA-B*56:22,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYLWY\nHLA-B*56:23,YYAEYRNIYANTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*56:24,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:25,YYAEYRNIYAQTDE
SNLYWTYNLYTWAELAYLWY\nHLA-B*56:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:27,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:29,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*57:01,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:02,YYAMYGENMASTYENIAYIVYNYYTWAVRAYLWY\nHLA-B*57:03,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B*57:04,YYAMYGENMASTYENIAYIVYDDYTWAVRAYLWY\nHLA-B*57:05,YYAMYGENMASTYENIAYIRYNYYTWAVRAYLWY\nHLA-B*57:06,YYAMYGENMASTYENIAYIVYDSYIWAVLAYLWY\nHLA-B*57:07,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLSY\nHLA-B*57:08,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:09,YYAMYGENMASTYENIAYIVYNYYTWAEDAYLWY\nHLA-B*57:10,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:11,YYAMYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*57:12,YYAMYGENMASTYESNLYIVYNYYTWAVRAYLWY\nHLA-B*57:13,YYAMYGENMASTYENIAYIVYDSYTWAERAYEWY\nHLA-B*57:14,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWH\nHLA-B*57:15,YYAMYGENVASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:16,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:17,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B*57:18,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:19,YYAMYGENMASTYENIAYIVYDSYTWAVRAYLWY\nHLA-B*57:20,YYAMYGKNMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:21,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:22,YYAMYGENMASTYENIAYIVYDSYTWAELAYLWY\nHLA-B*57:23,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:24,YYAMYGENMASTYENIAYIVYDSYTWAVDAYLWY\nHLA-B*57:25,YYAMYGENMASTYENIAYIVYDSYTWAVLAYEWY\nHLA-B*57:26,YYAMYGENMASTYENIAYIVYDSYTWAVLAYTWY\nHLA-B*57:27,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:29,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:30,YYAMYGENMASTYENIAYIVYDSYTWAARAYLWY\nHLA-B*57:31,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:32,YYAMYGENMASTYENIAYIVYHDYTWAVLAYLWY\nHLA-B*58:01,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:02,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*58:04,YYATYEENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:05,YYATYGENMASTYENIAYIRYDSYTLAALAYTWY\nHLA-B*58:06,YYATYGENMASTYENIAYLWYDSYTWAELAYLWY\nHLA-B*58:07,YYATYGENMASTYENIAYLWYDSYTWAVLAYLSY\nHLA-B*58:08,YYATYGENMASTYENIAYWTYNYYTWAELAYLWH\nHLA-B*58:09,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWH\nHLA-B*58:11,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:12,YYSTYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:13,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:14,YYATYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*58:15,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:16,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B*58:18,YYATYGENMASTYENIAYLSYDSYTWAVLAYLWY\nHLA-B*58:19,YYATYGENMASTYENIAYIRYDSYTWAELAYLWY\nHLA-B*58:20,YYATYGENMASTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*58:21,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:22,YYATYGENMASTYENIAYIRYDSYTWAVRAYLWY\nHLA-B*58:23,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:24,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:25,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*58:26,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B*58:27,YYATYGENMASTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*58:28,YYATYGENMASTYENIAYIRYNYYTWAVLAYLWY\nHLA-B*58:29,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:30,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*59:01,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B*59:02,YYAEYRNIFTNTYENIAYWTYNYYTWAVLAYTWY\nHLA-B*59:03,YYAEYRNIFTNTYENIAYWTYNFYTWAVLAYTWY\nHLA-B*59:04,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B*59:05,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B*67:01,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*67:02,YYSGYREKYRQADVSNLYLRYNFYTWAVLTYTWY\nHLA-B*73:01,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B*73:02,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B*78:01,YYATYRNIFTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B*78:02,YYATYRNIFTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B*78:03,YYATYRNICTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B*78:04,YYATYRNIFTNTYESNL
YWTYNYYTWAVLAYLWY\nHLA-B*78:05,YYATYREISTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B*78:06,YYATYREISTNTYENNLYWTYNYYTWAELAYLWH\nHLA-B*78:07,YYATYRNIFTNTDESNLYWTYNYYTWAELAYTWH\nHLA-B*81:01,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:02,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:03,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:05,YYSEYRNIFAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*82:01,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B*82:02,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B*82:03,YYSEYRNIYAQTDESNLYLRYNLYTWAVDAYLSY\nHLA-B*83:01,YYSEYRNIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-C*01:02,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:03,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*01:04,YFSGYREKYRQTDVSNLYLWCDSYTWAEWAYTWY\nHLA-C*01:05,YFSGYREKYRQTDVSNLYLRSDYYTWAERAYTWY\nHLA-C*01:06,YFSGYREKYRQTDVSNLYLWCDYYTWAVRAYTWY\nHLA-C*01:07,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:08,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:09,YFSGYREKYRQTDVSNLYLWCDYYTWAEWAYTWY\nHLA-C*01:10,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYEWY\nHLA-C*01:11,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:12,YFSGYREKYRQTDVSNLYLWYDYYTWAERAYTWY\nHLA-C*01:13,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:14,YFSGYREKYRQTDVNKLYLWCDYYTWAERAYTWY\nHLA-C*01:15,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:16,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:17,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:18,YFSGYREKYHQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:19,YFSGYREKYRQTDVCNLYLWCDYYTWAERAYTWY\nHLA-C*01:20,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:21,YFSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*01:22,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYTWY\nHLA-C*01:23,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:24,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*01:25,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:26,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:27,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:28,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:29,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTCY\nHLA-C*01:30,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWH\nHLA-C*01:31,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYMWY\nHLA-C*01:32,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:33,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:34,YFSGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C*01:35,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYLWY\nHLA-C*01:36,YFSGYREKYRQTDVSNLYLRFDYYTWAERAYTWY\nHLA-C*01:38,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:39,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:40,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*02:02,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:03,YYAGYREKYRQTDVNKLYLRYDSYTWAVLAYEWY\nHLA-C*02:04,CYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYEWY\nHLA-C*02:06,YYAGYREKYRQTDVNKLYLRYDLYTWAEWAYEWY\nHLA-C*02:07,YYAGYREKYRQTDVNKLYLRYHDYTWAEWAYEWY\nHLA-C*02:08,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:09,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:10,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:11,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:12,YYAGYREKYRQADVSKLYLRYDSYTWAEWAYEWY\nHLA-C*02:13,YYAGYREKYRQTDVNKLYLRYDSYTWAAWAYEWY\nHLA-C*02:14,YDAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:15,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:16,YYAGYREKYRQTDVNKLYLRYDSYTWAELAYEWY\nHLA-C*02:17,YYAGYREKYRQTDVNKLYLWFDSYTWAEWAYEWY\nHLA-C*02:18,YYAGYREKYRQTDVNKLYLRYDSYTWAALAYEWY\nHLA-C*02:19,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C*02:20,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:22,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYEWY\nHLA-C*02:23,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C*02:24,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:26,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:27,YYAGYREKYRQTDVSNLYLR
YDSYTWAEWAYEWY\nHLA-C*02:28,YYAGYREKYRQTDVNKLYLRYDSYTWAVWAYEWY\nHLA-C*02:29,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:30,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:31,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:32,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYEWY\nHLA-C*02:33,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:34,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:35,YYAGYREKYRQTDVNKLHLRYDSYTWAEWAYEWY\nHLA-C*02:36,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:37,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:39,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:40,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*03:01,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:02,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:03,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:04,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:05,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:06,YYAGYREKYRQTDVSNLYIRYVYYTWAELAYLWY\nHLA-C*03:07,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C*03:08,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:09,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:10,YYAGYREKYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C*03:11,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:12,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C*03:13,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C*03:14,YYAGYREKYRQTDVSNLYIRYDSYTLAALAYTWY\nHLA-C*03:15,YYAGYREKYRQADVNKLYLRYDSYTWAELAYLWY\nHLA-C*03:16,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYTWY\nHLA-C*03:17,YYAGYREKYRQTDVSNLYLWYDYYTWAELAYLWY\nHLA-C*03:18,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:19,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C*03:21,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C*03:23,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:24,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:25,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:26,YYAGYREKYRQTDVSNLYIRYDFYTWAELAYLWY\nHLA-C*03:27,YYAGYREKYRQADVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:28,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C*03:29,YYAGYRENYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C*03:30,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:31,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:32,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:33,YYAGYREKYRQTDVSNLCLRYDSYTWAELAYLWY\nHLA-C*03:34,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYEWY\nHLA-C*03:35,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C*03:36,YYAGYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-C*03:37,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:38,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:39,YDSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:40,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:41,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:42,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:43,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:44,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:45,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C*03:46,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:47,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:48,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:49,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C*03:50,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:51,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWH\nHLA-C*03:52,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:53,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:54,YYAGYREKYRQTDVSNLYIRYDYYTWAELPYLWY\nHLA-C*03:55,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C*03:56,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:57,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:58,YYAGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*03:59,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:60,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:61,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:62,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:63,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:64,YYAGYREKYRQTDVSNLYIRYDY
YTWAELAYLWY\nHLA-C*03:65,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:66,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:67,YDAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:68,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:69,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:70,YYAGYREKYRQTDESNLYIRYDYYTWAELAYLWY\nHLA-C*03:71,YYAGYREKYRQTDVSNLYLWYDSYTWAELAYLWY\nHLA-C*03:72,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:73,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:74,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:75,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:76,HYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:77,YYAGYREKYRQTDVSNLYIRYDYYTWAVLAYLWY\nHLA-C*03:78,YYAGYREKYRQTDVSNLYIRYDYYTWAEMAYLWY\nHLA-C*03:79,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:80,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C*03:81,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:82,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:83,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:84,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:85,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:86,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*03:87,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:88,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:89,YYAGYREKYRQTDVSNLYLRFDSYTWAELAYLWY\nHLA-C*03:90,YYAGYREKYRQTDVSNLYIRSDYYTWAELAYLWY\nHLA-C*03:91,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:92,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C*03:93,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:94,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*04:01,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:03,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:04,YSAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C*04:05,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:06,YYAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C*04:07,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:08,YSAGYREKYRQADVNKLYLRFNFYTWAERAYLWY\nHLA-C*04:10,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:11,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:12,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:13,YSAGYREKYRQADVNKLYLRFNFYTWAALAYTWY\nHLA-C*04:14,YSAGYREKYRQADVNKLYLRFNFYTWAEQAYTWY\nHLA-C*04:15,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*04:16,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:17,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*04:18,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:19,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:20,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:23,YSAGYREKYRQADVNKLYLRFDFYTWAERAYTWY\nHLA-C*04:24,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:25,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:26,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:27,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:28,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:29,YSAGYREKYRQADVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:30,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:31,YSAGYREKYRQADVNKLYLRFNFYTWVERAYTWY\nHLA-C*04:32,YSAGYREKYRQADVNKLYLRFNFYTWAERAYEWY\nHLA-C*04:33,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:34,YSAGYREKYRQADVNKLYLRFNFYTWAVLAYLWY\nHLA-C*04:35,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:36,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:37,YSAGYREKYRQADVNKLYLWCNFYTWAERAYTWY\nHLA-C*04:38,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:39,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:40,YSAGYREKYRQADVNKLYFRFNFYTWAERAYTWY\nHLA-C*04:41,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:42,YDAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:43,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:44,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:45,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:46,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:47,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:48,YSAGYREKYRQADVNKLYLRFNFYTW
AERPYTWY\nHLA-C*04:49,YSAGYWEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:50,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:51,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:52,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:53,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:54,YSAGYREKYRQADVNKLYLRFDSYTWAERAYTWY\nHLA-C*04:55,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:56,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:57,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:58,YSAGYREKYRQADVNKLYLRFNFYTLAALAYTWY\nHLA-C*04:60,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:61,YSAGYREKYRQADVNKLYLRFNFYTWAARAYTWY\nHLA-C*04:62,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:63,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:64,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:65,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:66,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:67,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:68,YSAGYREKYRQADVNKLYLRFNFYTWAAQAYTWY\nHLA-C*04:69,YSAGYGEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:70,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*05:01,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:03,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:04,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYTWY\nHLA-C*05:05,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:06,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:08,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C*05:09,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:10,YYAGYREKYRQTDVNKLYIRYNFYTWAERAYTWY\nHLA-C*05:11,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYTWY\nHLA-C*05:12,YYAGYREKYRQTDVNKLYLRYNFYTWAVRAYTWY\nHLA-C*05:13,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:14,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:15,YYAGYWEKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:16,YYAGYREKYRQTDVNKLYLWYNFYTWAERAYTWY\nHLA-C*05:17,YYAGYREKYRQTDVNKLYLRYNFYTWAALAYTWY\nHLA-C*05:18,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:19,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:20,YYAGYREKYRQTDVNNLYLRYNFYTWAERAYTWY\nHLA-C*05:21,YYAGYREKYRQTDVNKLHLRYNFYTWAERAYTWY\nHLA-C*05:22,YYAGYREKYRQTDVNKLYLRYDFYTWAERAYTWY\nHLA-C*05:23,YYAGYREKYRQTDVNKLYLRYNFYTLAERAYTWY\nHLA-C*05:24,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:25,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:26,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:27,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYLWY\nHLA-C*05:28,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:29,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:30,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:31,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:32,YYAGYREKYRQTDVNRLYLRYNFYTWAERAYTWY\nHLA-C*05:33,YYAGCREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:34,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:35,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:36,YYAGYRENYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:37,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:38,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:39,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYLWY\nHLA-C*05:40,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:41,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:42,YYAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:43,YDAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:44,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:45,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*06:02,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:03,YYSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:04,YDSGYREKYRQADVNKLYLWYDSYTWAELAYTWY\nHLA-C*06:05,YDSGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C*06:06,YDSGYREKYRQADVNKLYLWYDSYTWAERAYTWY\nHLA-C*06:07,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:08,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYEWY\nHLA-C*06:09,YDSGYREKYRQADVNKLYLWYNFYTWAEWAYTWY\nHLA-C*06:10,YDPGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:11,YDSGYREKYRQADVSNLYLWYDSYTWAEW
AYTWY\nHLA-C*06:12,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:13,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:14,YDSGYREKYRQADVNKLYIWYDSYTWAEWAYTWY\nHLA-C*06:15,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:17,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:18,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:19,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:20,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:21,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:22,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:23,YDSGYREKYRQADVNKLYLWCDSYTWAEWAYTWY\nHLA-C*06:24,YDSGYREKYRQADVNKLYLWYDSYTWAEWAHTWY\nHLA-C*06:25,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:26,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:27,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:28,YDAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:29,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:30,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:31,YDSGYREKYRQADVNKLYLWYDSYTWAAWAYTWY\nHLA-C*06:32,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:33,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:34,YDSGYREKYRQADVNKLYLWYDFYTWAEWAYTWY\nHLA-C*06:35,YDSGYREKYRQADVNKLYIRSDSYTWAEWAYTWY\nHLA-C*06:36,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:37,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:38,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:39,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:40,YDSGYREKYRQADVNKLYLWYDSYTWAEWTYTWY\nHLA-C*06:41,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:42,YDSGYREKYRQADVNKLYLWYDSYTRAEWAYTWY\nHLA-C*06:43,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:44,YDSGYRENYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:45,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*07:01,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:02,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:03,YDSGYREKYRQADVSNLYLRSDSYTWAALAYLWY\nHLA-C*07:04,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:05,YDSGYREKYRQADVSNLYLNYDSYTLAALAYTWY\nHLA-C*07:06,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:07,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C*07:08,YDSGYREKYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C*07:09,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C*07:10,YDSGYREKYRQADVSNLYIRSDSYTLAALAYTWY\nHLA-C*07:11,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:12,YDSGYREKYRQADVSNLYFRYDFYTWAADAYTWY\nHLA-C*07:13,YDSGYREKYRQADVSNLYLRSDFYTLAALAYTWY\nHLA-C*07:14,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:15,YDSGYREKYRQADVSNLYLRSDSYTLAALAYEWY\nHLA-C*07:16,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:17,YDSGYREKYRQADVSNLYLRSDSYTWAALAYTWY\nHLA-C*07:18,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:19,YDSGYRENYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:20,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:21,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:22,YDSGYRENYRQADVSNLYLRYDSYTLAAWAYTWY\nHLA-C*07:23,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:24,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:25,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:26,YYSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:27,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:28,YDSGYRENYRQADVSNLYLRYNFYTLAALAYTWY\nHLA-C*07:29,YDSGYREKYRQADVSNLYLRSDYYTLAALAYTWY\nHLA-C*07:30,YDSGYRENYRQADVSNLYLRYDSYTLAGLAYTWY\nHLA-C*07:31,YDSGYREKYRQADVSNLYLWYDSYTLAALAYTWY\nHLA-C*07:35,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:36,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:37,YDSGYREKYRQADVSNLYLRSDSYTLAARAYTWY\nHLA-C*07:38,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:39,YDSGYREKYRQTDVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:40,YDSGYRENYRQTDVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:41,YDSGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*07:42,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:43,YDSGYREKYRQADVSNLYIRYDSYTLAALAYTWY\nHLA-C*07:44,YDSGYRENYRQADVSNLYLRYDSYTLAALAYT
WY\nHLA-C*07:45,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:46,YDSEYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:47,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:48,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:49,YDSGYREKYRQADVNNLYLRSDSYTLAALAYTWY\nHLA-C*07:50,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:51,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:52,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:53,YDSGYRENYRQADVSNLYLRYDSYTLAAQAYTWY\nHLA-C*07:54,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:56,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:57,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:58,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:59,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:60,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:62,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:63,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:64,YSAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:65,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:66,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:67,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:68,YDSGYREKYRQADVSNLYLRSDSYTLAADAYTWY\nHLA-C*07:69,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:70,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:71,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:72,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:73,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:74,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:75,YDSGYREKYRQADVSNLHLRSDSYTLAALAYTWY\nHLA-C*07:76,YDSGYREKYRQADVNKLYLRSDSYTLAALAYTWY\nHLA-C*07:77,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:78,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:79,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:80,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:81,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:82,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:83,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:84,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:85,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:86,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:87,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:88,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:89,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:90,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:91,YDSGYRENYRQADVSNLYLRYDSYTLTALAYTWY\nHLA-C*07:92,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:93,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:94,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:95,YDSGYRENYRQADVSNLYLRYDSYTLAVLAYTWY\nHLA-C*07:96,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:97,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:99,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:100,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:101,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:102,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:103,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:105,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:106,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:107,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:108,YDSGYRENYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C*07:109,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:110,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:111,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:112,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:113,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:114,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:115,YDSGYRENYRQADVSDLYLRYDSYTLAALAYTWY\nHLA-C*07:116,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:117,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:118,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:119,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:120,YDSGYRENYRQADVSNLYLRYDSYTLAALAYPWY\nHLA-C*07:122,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:123,YDSGYREKYRQAD
VSNLYLRSDSYTLAALAYTWY\nHLA-C*07:124,YDSGYRENYRQADESNLYLRYDSYTLAALAYTWY\nHLA-C*07:125,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:126,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:127,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:128,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:129,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:130,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:131,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:132,DDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:133,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:134,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:135,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:136,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:137,YDSGYREKYRQADVSNLYLRSDSYTLAALTYTWY\nHLA-C*07:138,YDSGYREKYRQADVSNLYLRSDSYTLAAWAYTWY\nHLA-C*07:139,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:140,YDSGYRENYRQADVSNLYLRYDSYTWAVDAYTWY\nHLA-C*07:141,YDSGYRENYRQADVSNLYLRYDSYTWAALAYTWY\nHLA-C*07:142,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:143,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:144,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:145,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:146,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:147,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:148,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:149,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*08:01,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:02,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:03,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:04,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C*08:05,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:06,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYAWY\nHLA-C*08:07,YYAGYREKYRQTDVSNLYLRYNFYTLAERAYTWY\nHLA-C*08:08,YYAGYREKYRQTDVSNLYLSYNFYTWATLAYTWY\nHLA-C*08:09,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C*08:10,YYAGYREKYRQTDVNKLYLRYNFYTWATLAYTWY\nHLA-C*08:11,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C*08:12,YYAGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C*08:13,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C*08:14,YYSGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:15,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:16,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:17,YYAGYREKYCQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:18,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:19,YYAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*08:20,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:21,YYAGYREKYRQADVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:22,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:23,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:24,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:25,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:27,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C*08:28,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:29,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C*08:30,YYAGYREKYRQTDVSNLYLRYNFYTWPERAYTWY\nHLA-C*08:31,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYEWY\nHLA-C*08:32,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:33,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:34,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:35,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*12:02,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:03,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:04,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:06,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:07,YYAGYREKYRQADVGNLYLWYDSYTWAEWAYTWY\nHLA-C*12:08,YYAGYRENYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:09,YYAGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C*12:10,YYAGYREKYRQADVSNLYLRFDSYTWAEWAYTWY\nHLA-C*12:11,YYAGYREKYRQADVSNLYLWSDSYTWAEWAYTWY\nHLA-C*12:12,YYAGYREKYRQADESNLYLWYDSYTWAEWAYTWY\nHLA-C*12:13,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:14,YYAGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHL
A-C*12:15,YYAGYREKYRQADVSNLYLWYDLYTWAEWAYTWY\nHLA-C*12:16,YDSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:17,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:18,YYAGYREKYRQADVSNLYLRYDSYTWAELAYTWY\nHLA-C*12:19,YYAGYREKYRQADVSNLYLWYDSYTWAECAYTWY\nHLA-C*12:20,YYAGYREKYRQADVSNLYLWYDSYTWAELAYTWY\nHLA-C*12:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYTWY\nHLA-C*12:22,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:23,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:24,YYAGYREKYRQADVSNLYLWYDSYTWAERAYTWY\nHLA-C*12:25,YYAGYPEKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:26,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:27,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:28,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:29,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:30,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:31,YYAGYREKYRQADVSNLYLWYNFYTWAEWAYTWY\nHLA-C*12:32,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:33,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:34,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:35,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:36,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:37,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:38,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:40,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:41,YYAGYREKYRQADVNKLYLRYDSYTWAEWAYTWY\nHLA-C*12:43,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:44,YYAGYREKYRQADVSNLYIRYDSYTWAEWAYTWY\nHLA-C*14:02,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:03,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:04,YSAGYREKYRQADVNNLYLWFDSYTWAERAYTWY\nHLA-C*14:05,YSAGYREKYRQTDVSNLYLWYDSYTWAERAYTWY\nHLA-C*14:06,YSAGYREKYRQTDVSNLYLWFDSYTWAELAYTWY\nHLA-C*14:08,YSAGYREKYRQTDVSNLYPWFDSYTWAERAYTWY\nHLA-C*14:09,YSAGYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-C*14:10,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:11,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:12,YSAGYREKYRQTDVNKLYLWFDSYTWAERAYTWY\nHLA-C*14:13,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:14,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:15,YSAGYREKYRQTDVSNLYLWFDSYTWAALAYTWY\nHLA-C*14:16,YSAGYREKYRQTDVSNLYLWFDSYTWAEWAYTWY\nHLA-C*14:17,YSAGYREKYRQTDVSNLYLWFDSYTLAARAYTWY\nHLA-C*14:18,YSSGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:19,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:20,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYLWY\nHLA-C*15:02,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:03,YYAGYRENYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:04,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:05,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:06,YYAGYRENYRQTDVNKLYIRYDYYTWAELAYTWY\nHLA-C*15:07,YYAGYRENYRQTDVSNLYIRYDLYTWAELAYTWY\nHLA-C*15:08,YYAGYRENYRQTDVNKLYIRYDLYTWAERAYTWY\nHLA-C*15:09,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:10,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:11,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:12,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:13,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:15,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYLWY\nHLA-C*15:16,YYAGYREKYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:17,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:18,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:19,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:20,YYAGYREKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:21,YYAGYRENYRQTDVSKLYIRYDLYTWAELAYTWY\nHLA-C*15:22,YYAGYRENYRQTDVNKLYLRYDFYTWAELAYTWY\nHLA-C*15:23,YDAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:24,YYAGYRENYRQTDVNKLYIRYNYYTWAELAYTWY\nHLA-C*15:25,YYAGYREKYRQADVSNLYIRYNFYTWAEDAYTSY\nHLA-C*15:26,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:27,YYAGYRNKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:28,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:29,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:30,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C
*15:31,YYAGYRENYRQTDVNKLYIRYDLYTWAALAYTWY\nHLA-C*15:33,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:34,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:35,YYAGYRENYRQTDVNKLHIRYDLYTWAELAYTWY\nHLA-C*16:01,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:02,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:04,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C*16:06,YYAGYREKYRQTDVSNLYLRSDSYTWAAQAYTWY\nHLA-C*16:07,YYAGYREKYRQTDVSNLYLRYDSYTWAAQAYTWY\nHLA-C*16:08,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:09,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:10,YYAGYREKYRQTDVSNLYLWYDDYTWAAQAYTWY\nHLA-C*16:11,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:12,YYAGYGEKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:13,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:14,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:15,YYAGYREKYRQADVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:17,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:18,YYAGYREKYRQTDVSNLYLWCDSYTWAAQAYTWY\nHLA-C*16:19,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:20,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:21,YDAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:22,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:23,YYAGYREKYRQTDVSNLYLWFDSYTWAAQAYTWY\nHLA-C*16:24,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:25,YYAGYREKYRQADVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:26,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*17:01,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:02,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:03,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:04,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:05,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:06,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:07,YYAGYREKYRQADVNKLYIRYNFYSLAELAYLWY\nHLA-C*18:01,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*18:02,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*18:03,YDSGYREKYRQADVNKLYLRFNFYTWAEWAYEWY\n"
  },
  {
    "path": "downloads-generation/models_class1_kim_benchmark/curate.py",
    "content": "\"\"\"\nFilter and combine various peptide/MHC datasets to derive a composite training set,\noptionally including eluted peptides identified by mass-spec.\n\"\"\"\nimport sys\nimport argparse\n\nimport pandas\n\nfrom mhcflurry.common import normalize_allele_name\n\n\ndef normalize_allele_name_or_return_unknown(s):\n    result = normalize_allele_name(\n        s, raise_on_error=False, default_value=\"UNKNOWN\")\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data-kim2014\",\n    action=\"append\",\n    default=[],\n    help=\"Path to Kim 2014-style affinity data\")\nparser.add_argument(\n    \"--data-iedb\",\n    action=\"append\",\n    default=[],\n    help=\"Path to IEDB-style affinity data (e.g. mhc_ligand_full.csv)\")\nparser.add_argument(\n    \"--data-systemhc-atlas\",\n    action=\"append\",\n    default=[],\n    help=\"Path to systemhc-atlas-style mass-spec data\")\nparser.add_argument(\n    \"--data-abelin-mass-spec\",\n    action=\"append\",\n    default=[],\n    help=\"Path to Abelin Immunity 2017 mass-spec hits\")\nparser.add_argument(\n    \"--include-iedb-mass-spec\",\n    action=\"store_true\",\n    default=False,\n    help=\"Include mass-spec observations in IEDB\")\n\nparser.add_argument(\n    \"--out-csv\",\n    required=True,\n    help=\"Result file\")\n\nQUALITATIVE_TO_AFFINITY_AND_INEQUALITY = {\n    \"Negative\": (5000.0, \">\"),\n    \"Positive\": (500.0, \"<\"),  # used for mass-spec hits\n    \"Positive-High\": (100.0, \"<\"),\n    \"Positive-Intermediate\": (1000.0, \"<\"),\n    \"Positive-Low\": (5000.0, \"<\"),\n}\nQUALITATIVE_TO_AFFINITY = dict(\n    (key, value[0]) for (key, value)\n    in QUALITATIVE_TO_AFFINITY_AND_INEQUALITY.items())\nQUALITATIVE_TO_INEQUALITY = dict(\n    (key, value[1]) for (key, value)\n    in QUALITATIVE_TO_AFFINITY_AND_INEQUALITY.items())\n\n\nEXCLUDE_IEDB_ALLELES = [\n    \"HLA class I\",\n    \"HLA class II\",\n]\n\n\ndef load_data_kim2014(filename):\n    df = pandas.read_table(filename)\n    print(\"Loaded kim2014 data: %s\" % str(df.shape))\n    df[\"measurement_source\"] = \"kim2014\"\n    df[\"measurement_value\"] = df.meas\n    df[\"measurement_type\"] = (df.inequality == \"=\").map({\n        True: \"quantitative\",\n        False: \"qualitative\",\n    })\n    df[\"measurement_inequality\"] = df.inequality\n    df[\"original_allele\"] = df.mhc\n    df[\"peptide\"] = df.sequence\n    df[\"allele\"] = df.mhc.map(normalize_allele_name_or_return_unknown)\n    print(\"Dropping un-parseable alleles: %s\" % \", \".join(\n        df.ix[df.allele == \"UNKNOWN\"][\"mhc\"].unique()))\n    df = df.ix[df.allele != \"UNKNOWN\"]\n\n    print(\"Loaded kim2014 data: %s\" % str(df.shape))\n    return df\n\n\ndef load_data_systemhc_atlas(filename, min_probability=0.99):\n    df = pandas.read_csv(filename)\n    print(\"Loaded systemhc atlas data: %s\" % str(df.shape))\n\n    df[\"measurement_source\"] = \"systemhc-atlas\"\n    df[\"measurement_value\"] = QUALITATIVE_TO_AFFINITY[\"Positive\"]\n    df[\"measurement_inequality\"] = \"<\"\n    df[\"measurement_type\"] = \"qualitative\"\n    df[\"original_allele\"] = df.top_allele\n    df[\"peptide\"] = df.search_hit\n    df[\"allele\"] = df.top_allele.map(normalize_allele_name_or_return_unknown)\n\n    print(\"Dropping un-parseable alleles: %s\" % \", \".join(\n        str(x) for x in df.ix[df.allele == \"UNKNOWN\"][\"top_allele\"].unique()))\n    df = df.loc[df.allele != \"UNKNOWN\"]\n    print(\"Systemhc atlas data now: %s\" % str(df.shape))\n\n    
print(\"Dropping data points with probability < %f\" % min_probability)\n    df = df.loc[df.prob >= min_probability]\n    print(\"Systemhc atlas data now: %s\" % str(df.shape))\n\n    print(\"Removing duplicates\")\n    df = df.drop_duplicates([\"allele\", \"peptide\"])\n    print(\"Systemhc atlas data now: %s\" % str(df.shape))\n\n    return df\n\n\ndef load_data_abelin_mass_spec(filename):\n    df = pandas.read_csv(filename)\n    print(\"Loaded Abelin mass-spec data: %s\" % str(df.shape))\n\n    df[\"measurement_source\"] = \"abelin-mass-spec\"\n    df[\"measurement_value\"] = QUALITATIVE_TO_AFFINITY[\"Positive\"]\n    df[\"measurement_inequality\"] = \"<\"\n    df[\"measurement_type\"] = \"qualitative\"\n    df[\"original_allele\"] = df.allele\n    df[\"allele\"] = df.original_allele.map(normalize_allele_name_or_return_unknown)\n\n    print(\"Dropping un-parseable alleles: %s\" % \", \".join(\n        str(x) for x in df.ix[df.allele == \"UNKNOWN\"][\"allele\"].unique()))\n    df = df.loc[df.allele != \"UNKNOWN\"]\n    print(\"Abelin mass-spec data now: %s\" % str(df.shape))\n\n    print(\"Removing duplicates\")\n    df = df.drop_duplicates([\"allele\", \"peptide\"])\n    print(\"Abelin mass-spec data now: %s\" % str(df.shape))\n\n    return df\n\n\ndef load_data_iedb(iedb_csv, include_qualitative=True, include_mass_spec=False):\n    iedb_df = pandas.read_csv(iedb_csv, skiprows=1, low_memory=False)\n    print(\"Loaded iedb data: %s\" % str(iedb_df.shape))\n\n    print(\"Selecting only class I\")\n    iedb_df = iedb_df.ix[\n        iedb_df[\"MHC allele class\"].str.strip().str.upper() == \"I\"\n    ]\n    print(\"New shape: %s\" % str(iedb_df.shape))\n\n    print(\"Dropping known unusuable alleles\")\n    iedb_df = iedb_df.ix[\n        ~iedb_df[\"Allele Name\"].isin(EXCLUDE_IEDB_ALLELES)\n    ]\n    iedb_df = iedb_df.ix[\n        (~iedb_df[\"Allele Name\"].str.contains(\"mutant\")) &\n        (~iedb_df[\"Allele Name\"].str.contains(\"CD1\"))\n    ]\n\n    iedb_df[\"allele\"] = iedb_df[\"Allele Name\"].map(normalize_allele_name_or_return_unknown)\n    print(\"Dropping un-parseable alleles: %s\" % \", \".join(\n        iedb_df.ix[iedb_df.allele == \"UNKNOWN\"][\"Allele Name\"].unique()))\n    iedb_df = iedb_df.ix[iedb_df.allele != \"UNKNOWN\"]\n\n    print(\"IEDB measurements per allele:\\n%s\" % iedb_df.allele.value_counts())\n\n    quantitative = iedb_df.ix[iedb_df[\"Units\"] == \"nM\"].copy()\n    quantitative[\"measurement_type\"] = \"quantitative\"\n    quantitative[\"measurement_inequality\"] = \"=\"\n    print(\"Quantitative measurements: %d\" % len(quantitative))\n\n    qualitative = iedb_df.ix[iedb_df[\"Units\"] != \"nM\"].copy()\n    qualitative[\"measurement_type\"] = \"qualitative\"\n    print(\"Qualitative measurements: %d\" % len(qualitative))\n    if not include_mass_spec:\n        qualitative = qualitative.ix[\n            (~qualitative[\"Method/Technique\"].str.contains(\"mass spec\"))\n        ].copy()\n\n    qualitative[\"Quantitative measurement\"] = (\n        qualitative[\"Qualitative Measure\"].map(QUALITATIVE_TO_AFFINITY))\n    qualitative[\"measurement_inequality\"] = (\n        qualitative[\"Qualitative Measure\"].map(QUALITATIVE_TO_INEQUALITY))\n\n    print(\"Qualitative measurements (possibly after dropping MS): %d\" % (\n        len(qualitative)))\n\n    iedb_df = pandas.concat(\n        (\n            ([quantitative]) +\n            ([qualitative] if include_qualitative else [])),\n        ignore_index=True)\n\n    print(\"IEDB measurements per allele:\\n%s\" % 
iedb_df.allele.value_counts())\n\n    print(\"Subselecting to valid peptides. Starting with: %d\" % len(iedb_df))\n    iedb_df[\"Description\"] = iedb_df.Description.str.strip()\n    iedb_df = iedb_df.ix[\n        iedb_df.Description.str.match(\"^[ACDEFGHIKLMNPQRSTVWY]+$\")\n    ]\n    print(\"Now: %d\" % len(iedb_df))\n\n    print(\"Annotating last author and category\")\n    iedb_df[\"last_author\"] = iedb_df.Authors.map(\n        lambda x: (\n            x.split(\";\")[-1]\n            .split(\",\")[-1]\n            .split(\" \")[-1]\n            .strip()\n            .replace(\"*\", \"\"))).values\n    iedb_df[\"category\"] = (\n        iedb_df[\"last_author\"] + \" - \" + iedb_df[\"Method/Technique\"]).values\n\n    train_data = pandas.DataFrame()\n    train_data[\"peptide\"] = iedb_df.Description.values\n    train_data[\"measurement_value\"] = iedb_df[\n        \"Quantitative measurement\"\n    ].values\n    train_data[\"measurement_source\"] = iedb_df.category.values\n    train_data[\"measurement_inequality\"] = iedb_df.measurement_inequality.values\n\n    train_data[\"allele\"] = iedb_df[\"allele\"].values\n    train_data[\"original_allele\"] = iedb_df[\"Allele Name\"].values\n    train_data[\"measurement_type\"] = iedb_df[\"measurement_type\"].values\n    train_data = train_data.drop_duplicates().reset_index(drop=True)\n\n    return train_data\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n\n    dfs = []\n    for filename in args.data_iedb:\n        df = load_data_iedb(filename, include_mass_spec=args.include_iedb_mass_spec)\n        dfs.append(df)\n    for filename in args.data_kim2014:\n        df = load_data_kim2014(filename)\n        df[\"allele_peptide\"] = df.allele + \"_\" + df.peptide\n\n        # Give precedence to IEDB data.\n        if dfs:\n            iedb_df = dfs[0]\n            iedb_df[\"allele_peptide\"] = iedb_df.allele + \"_\" + iedb_df.peptide\n            print(\"Dropping kim2014 data present in IEDB.\")\n            df = df.ix[\n                ~df.allele_peptide.isin(iedb_df.allele_peptide)\n            ]\n            print(\"Kim2014 data now: %s\" % str(df.shape))\n        dfs.append(df)\n    for filename in args.data_systemhc_atlas:\n        df = load_data_systemhc_atlas(filename)\n        dfs.append(df)\n    for filename in args.data_abelin_mass_spec:\n        df = load_data_abelin_mass_spec(filename)\n        dfs.append(df)\n\n    df = pandas.concat(dfs, ignore_index=True)\n    print(\"Combined df: %s\" % (str(df.shape)))\n\n    print(\"Removing combined duplicates\")\n    df = df.drop_duplicates([\"allele\", \"peptide\", \"measurement_value\"])\n    print(\"New combined df: %s\" % (str(df.shape)))\n\n    df = df[[\n        \"allele\",\n        \"peptide\",\n        \"measurement_value\",\n        \"measurement_inequality\",\n        \"measurement_type\",\n        \"measurement_source\",\n        \"original_allele\",\n    ]].sort_values([\"allele\", \"peptide\"]).dropna()\n\n    print(\"Final combined df: %s\" % (str(df.shape)))\n\n    df.to_csv(args.out_csv, index=False)\n    print(\"Wrote: %s\" % args.out_csv)\n\nif __name__ == '__main__':\n    run()\n"
  },
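  {
    "path": "downloads-generation/models_class1_kim_benchmark/example_qualitative_mapping.py",
    "content": "\"\"\"\nIllustrative sketch, added in this edited dump (not a file from the original\nrepository): shows how curate.py's QUALITATIVE_TO_AFFINITY_AND_INEQUALITY\ntable turns qualitative IEDB measures into (affinity nM, inequality) pairs\nfor training. Toy data; the peptides are arbitrary examples.\n\"\"\"\nimport pandas\n\n# Same table as in curate.py.\nQUALITATIVE_TO_AFFINITY_AND_INEQUALITY = {\n    \"Negative\": (5000.0, \">\"),\n    \"Positive\": (500.0, \"<\"),  # used for mass-spec hits\n    \"Positive-High\": (100.0, \"<\"),\n    \"Positive-Intermediate\": (1000.0, \"<\"),\n    \"Positive-Low\": (5000.0, \"<\"),\n}\n\n# Toy rows standing in for IEDB qualitative measurements.\ndf = pandas.DataFrame({\n    \"peptide\": [\"SIINFEKL\", \"LLDVTAAV\", \"KAFSPEVI\"],\n    \"Qualitative Measure\": [\"Positive-High\", \"Negative\", \"Positive-Low\"],\n})\n\n# Map each label to an affinity and an inequality, as load_data_iedb does\n# for rows whose Units column is not \"nM\".\ndf[\"measurement_value\"] = df[\"Qualitative Measure\"].map(\n    lambda s: QUALITATIVE_TO_AFFINITY_AND_INEQUALITY[s][0])\ndf[\"measurement_inequality\"] = df[\"Qualitative Measure\"].map(\n    lambda s: QUALITATIVE_TO_AFFINITY_AND_INEQUALITY[s][1])\n\nprint(df)\n"
  },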
  {
    "path": "downloads-generation/models_class1_kim_benchmark/generate_hyperparameters.py",
    "content": "\"\"\"\nGenerate grid of hyperparameters\n\"\"\"\n\nfrom sys import stdout\nfrom copy import deepcopy\nfrom yaml import dump\n\nbase_hyperparameters = {\n    ##########################################\n    # ENSEMBLE SIZE\n    ##########################################\n    \"n_models\": 4,\n\n    ##########################################\n    # OPTIMIZATION\n    ##########################################\n    \"max_epochs\": 500,\n    \"patience\": 20,\n    \"early_stopping\": True,\n    \"validation_split\": 0.1,\n    \"minibatch_size\": None,\n    \"loss\": \"custom:mse_with_inequalities\",\n\n    ##########################################\n    # RANDOM NEGATIVE PEPTIDES\n    ##########################################\n    \"random_negative_rate\": 0.0,\n    \"random_negative_constant\": 25,\n    \"random_negative_affinity_min\": 20000.0,\n    \"random_negative_affinity_max\": 50000.0,\n\n    ##########################################\n    # PEPTIDE REPRESENTATION\n    ##########################################\n    # One of \"one-hot\", \"embedding\", or \"BLOSUM62\".\n    \"peptide_amino_acid_encoding\": \"BLOSUM62\",\n    \"use_embedding\": False,  # maintained for backward compatability\n    \"embedding_output_dim\": 8,  # only used if using embedding\n    \"kmer_size\": 15,\n\n    ##########################################\n    # NEURAL NETWORK ARCHITECTURE\n    ##########################################\n    \"locally_connected_layers\": [\n        {\n            \"filters\": 8,\n            \"activation\": \"tanh\",\n            \"kernel_size\": 3\n        }\n    ],\n    \"activation\": \"tanh\",\n    \"output_activation\": \"sigmoid\",\n    \"layer_sizes\": [16],\n    \"dense_layer_l1_regularization\": None,\n    \"batch_normalization\": False,\n    \"dropout_probability\": 0.0,\n\n    ##########################################\n    # TRAINING Data\n    ##########################################\n    \"train_data\": {\"subset\": \"all\", \"pretrain_min_points\": 1000},\n}\n\ngrid = []\nfor train_subset in [\"all\", \"quantitative\"]:\n    for minibatch_size in [128]:\n        for dense_layer_size in [8, 16, 32, 64]:\n            for l1 in [0.0, 0.001]:\n                for num_lc in [0, 1, 2]:\n                    for lc_kernel_size in [3, 5]:\n                        new = deepcopy(base_hyperparameters)\n                        new[\"minibatch_size\"] = minibatch_size\n                        new[\"train_data\"][\"subset\"] = train_subset\n                        new[\"layer_sizes\"] = [dense_layer_size]\n                        new[\"dense_layer_l1_regularization\"] = l1\n                        (lc_layer,) = new[\"locally_connected_layers\"]\n                        lc_layer['kernel_size'] = lc_kernel_size\n                        if num_lc == 0:\n                            new[\"locally_connected_layers\"] = []\n                        elif num_lc == 1:\n                            new[\"locally_connected_layers\"] = [lc_layer]\n                        elif num_lc == 2:\n                            new[\"locally_connected_layers\"] = [lc_layer, deepcopy(lc_layer)]\n                        if not grid or new not in grid:\n                            grid.append(new)\n\ndump(grid, stdout)\n"
  },
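  {
    "path": "downloads-generation/models_class1_kim_benchmark/example_load_grid.py",
    "content": "\"\"\"\nIllustrative sketch, added in this edited dump (not a file from the original\nrepository): load and summarize the YAML grid emitted by\ngenerate_hyperparameters.py (e.g. via\n`python generate_hyperparameters.py > hyperparameters.yaml`, as the GENERATE\nscripts do). The hyperparameters.yaml default filename is an assumption.\n\"\"\"\nimport sys\n\nfrom yaml import safe_load\n\npath = sys.argv[1] if len(sys.argv) > 1 else \"hyperparameters.yaml\"\nwith open(path) as fd:\n    grid = safe_load(fd)\n\n# The grid is a list of hyperparameter dicts; each entry specifies one\n# architecture to train.\nprint(\"Grid size: %d\" % len(grid))\nfor key in (\"layer_sizes\", \"dense_layer_l1_regularization\", \"minibatch_size\"):\n    values = sorted(set(str(model.get(key)) for model in grid))\n    print(\"%s: %s\" % (key, values))\n"
  },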
  {
    "path": "downloads-generation/models_class1_kim_benchmark/write_validation_data.py",
    "content": "\"\"\"\nWrite and summarize model validation data, which is obtained by taking a full\ndataset and removing the data used for training.\n\n\"\"\"\nimport argparse\nimport sys\nfrom os.path import abspath\n\nimport pandas\nimport numpy\nfrom sklearn.model_selection import StratifiedKFold\n\nparser = argparse.ArgumentParser(usage = __doc__)\n\nparser.add_argument(\n    \"--include\",\n    metavar=\"INPUT.csv\",\n    nargs=\"+\",\n    help=\"Input CSV to include\")\nparser.add_argument(\n    \"--exclude\",\n    metavar=\"INPUT.csv\",\n    nargs=\"+\",\n    help=\"Input CSV to exclude\")\nparser.add_argument(\n    \"--out-data\",\n    metavar=\"RESULT.csv\",\n    help=\"Output dadta CSV\")\nparser.add_argument(\n    \"--out-summary\",\n    metavar=\"RESULT.csv\",\n    help=\"Output summary CSV\")\nparser.add_argument(\n    \"--mass-spec-regex\",\n    metavar=\"REGEX\",\n    default=\"mass[- ]spec\",\n    help=\"Regular expression for mass-spec data. Runs on measurement_source col.\"\n    \"Default: %(default)s.\")\nparser.add_argument(\n    \"--only-alleles-present-in-exclude\",\n    action=\"store_true\",\n    default=False,\n    help=\"Filter to only alleles that are present in files given by --exclude. \"\n    \"Useful for filtering to only alleles supported by a predictor, where the \"\n    \"training data for the predictor is given by --exclude.\")\n\n\ndef run(argv):\n    args = parser.parse_args(argv)\n\n    dfs = []\n    for input in args.include:\n        df = pandas.read_csv(input)\n        dfs.append(df)\n    df = pandas.concat(dfs, ignore_index=True)\n    print(\"Loaded data with shape: %s\" % str(df.shape))\n    del dfs\n\n    df = df.ix[\n        (df.peptide.str.len() >= 8) & (df.peptide.str.len() <= 15)\n    ]\n    print(\"Subselected to 8-15mers: %s\" % (str(df.shape)))\n\n    if args.exclude:\n        exclude_dfs = []\n        for exclude in args.exclude:\n            exclude_df = pandas.read_csv(exclude)\n            exclude_dfs.append(exclude_df)\n        exclude_df = pandas.concat(exclude_dfs, ignore_index=True)\n        del exclude_dfs\n\n        df[\"_key\"] = df.allele + \"__\" + df.peptide\n        exclude_df[\"_key\"] = exclude_df.allele + \"__\" + exclude_df.peptide\n        df[\"_excluded\"] = df._key.isin(exclude_df._key.unique())\n        print(\"Excluding measurements per allele (counts): \")\n        print(df.groupby(\"allele\")._excluded.sum())\n\n        print(\"Excluding measurements per allele (fractions): \")\n        print(df.groupby(\"allele\")._excluded.mean())\n\n        df = df.loc[~df._excluded]\n        del df[\"_excluded\"]\n        del df[\"_key\"]\n\n        if args.only_alleles_present_in_exclude:\n            df = df.loc[df.allele.isin(exclude_df.allele.unique())]\n\n    df[\"mass_spec\"] = df.measurement_source.str.contains(args.mass_spec_regex)\n    df.loc[df.mass_spec , \"measurement_inequality\"] = \"mass_spec\"\n\n    if args.out_summary:\n        summary_df = df.groupby(\n            [\"allele\", \"measurement_inequality\"]\n        )[\"measurement_value\"].count().unstack().fillna(0).astype(int)\n        summary_df[\"total\"] = summary_df.sum(1)\n        summary_df.to_csv(args.out_summary)\n        print(\"Wrote: %s\" % args.out_summary)\n\n    if args.out_data:\n        df.to_csv(args.out_data, index=False)\n        print(\"Wrote: %s\" % args.out_data)\n\nif __name__ == '__main__':\n    run(sys.argv[1:])\n"
  },
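  {
    "path": "downloads-generation/models_class1_kim_benchmark/example_exclusion_key.py",
    "content": "\"\"\"\nIllustrative sketch, added in this edited dump (not a file from the original\nrepository): the core exclusion step of write_validation_data.py, shown on\ntoy frames. A measurement is excluded from validation when its\nallele__peptide key also appears in the --exclude (training) data.\n\"\"\"\nimport pandas\n\nfull = pandas.DataFrame({\n    \"allele\": [\"HLA-A*02:01\", \"HLA-A*02:01\", \"HLA-B*57:01\"],\n    \"peptide\": [\"SIINFEKL\", \"LLDVTAAV\", \"KAFSPEVI\"],\n})\ntrain = pandas.DataFrame({\n    \"allele\": [\"HLA-A*02:01\"],\n    \"peptide\": [\"SIINFEKL\"],\n})\n\n# Build the same composite key used by write_validation_data.py.\nfull[\"_key\"] = full.allele + \"__\" + full.peptide\ntrain[\"_key\"] = train.allele + \"__\" + train.peptide\n\n# Keep only measurements whose key never occurs in the training data.\nvalidation = full.loc[~full._key.isin(train._key.unique())]\nvalidation = validation.drop(columns=\"_key\")\nprint(validation)\n"
  },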
  {
    "path": "downloads-generation/models_class1_minimal/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Model select standard MHCflurry Class I models, limiting to 1 model per allele.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_minimal\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\ncp $SCRIPT_DIR/write_validation_data.py .\n\nmkdir models\n\nGPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\necho \"Detected GPUS: $GPUS\"\n\nPROCESSORS=$(getconf _NPROCESSORS_ONLN)\necho \"Detected processors: $PROCESSORS\"\n\npython ./write_validation_data.py \\\n    --include \"$(mhcflurry-downloads path data_curated)/curated_training_data.with_mass_spec.csv.bz2\" \\\n    --exclude \"$(mhcflurry-downloads path models_class1_unselected)/models/train_data.csv.bz2\" \\\n    --only-alleles-present-in-exclude \\\n    --out-data test.csv \\\n    --out-summary test.summary.csv\n\nwc -l test.csv\n\ntime mhcflurry-class1-select-allele-specific-models \\\n    --data test.csv \\\n    --models-dir \"$(mhcflurry-downloads path models_class1_unselected)/models\" \\\n    --out-models-dir models \\\n    --scoring combined:mass-spec,mse,consensus \\\n    --consensus-num-peptides-per-length 10000 \\\n    --combined-min-models 1 \\\n    --combined-max-models 1 \\\n    --unselected-accuracy-scorer combined:mass-spec,mse \\\n    --unselected-accuracy-percentile-threshold 95 \\\n    --mass-spec-min-measurements 500 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 1\n\ntime mhcflurry-calibrate-percentile-ranks \\\n    --models-dir models \\\n    --num-peptides-per-length 100000 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\n# To save space in the final download\nrm test.csv\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
  {
    "path": "downloads-generation/models_class1_minimal/README.md",
    "content": "# Class I allele-specific models (minimal: ensemble size of 1)\n\nThis download contains \"minimal\" MHC Class I MHCflurry predictors consisting\nof a single model per allele. These predictors are expected to have slightly\nlower accuracy than the standard ensembles (models_class1) but are small and\nfast. Useful for testing.\n\nTo download these models and set them as the default predictor, run:\n\n```\n$ mhcflurry-downloads fetch models_class1_minimal\n$ export MHCFLURRY_DEFAULT_CLASS1_MODELS=$(mhcflurry-downloads path models_class1_minimal)/models\n```"
  },
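  {
    "path": "downloads-generation/models_class1_minimal/example_load_models.py",
    "content": "\"\"\"\nIllustrative sketch, added in this edited dump (not a file from the original\nrepository): load the minimal models from an explicit directory in Python,\nas an alternative to setting MHCFLURRY_DEFAULT_CLASS1_MODELS. Assumes the\nmodels_class1_minimal download has already been fetched; the allele and\npeptide below are arbitrary examples.\n\"\"\"\nimport subprocess\n\nfrom mhcflurry import Class1AffinityPredictor\n\n# Resolve the download path the same way the shell example does.\nmodels_dir = subprocess.check_output(\n    [\"mhcflurry-downloads\", \"path\", \"models_class1_minimal\"],\n    universal_newlines=True).strip() + \"/models\"\n\npredictor = Class1AffinityPredictor.load(models_dir)\nprint(predictor.predict(allele=\"HLA-A*02:01\", peptides=[\"SIINFEKL\"]))\n"
  },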
  {
    "path": "downloads-generation/models_class1_pan/GENERATE.WITH_HPC_CLUSTER.sh",
    "content": "bash GENERATE.sh cluster\n"
  },
  {
    "path": "downloads-generation/models_class1_pan/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Train pan-allele MHCflurry Class I models. Supports re-starting a failed run.\n#\n# Usage: GENERATE.sh <local|cluster> <fresh|continue-incomplete>\n#\n# cluster mode uses an HPC cluster (Mount Sinai chimera cluster, which uses lsf job\n# scheduler). This would need to be modified for other sites.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_pan\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nif [ \"$1\" != \"cluster\" ]\nthen\n    GPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\n    echo \"Detected GPUS: $GPUS\"\n\n    PROCESSORS=$(getconf _NPROCESSORS_ONLN)\n    echo \"Detected processors: $PROCESSORS\"\n\n    if [ \"$GPUS\" -eq \"0\" ]; then\n       NUM_JOBS=${NUM_JOBS-1}\n    else\n        NUM_JOBS=${NUM_JOBS-$GPUS}\n    fi\n    echo \"Num jobs: $NUM_JOBS\"\n    PARALLELISM_ARGS+=\" --num-jobs $NUM_JOBS --max-tasks-per-worker 1 --gpus $GPUS --max-workers-per-gpu 1\"\nelse\n    PARALLELISM_ARGS+=\" --cluster-parallelism --cluster-max-retries 1 --cluster-submit-command bsub --cluster-results-workdir $HOME/mhcflurry-scratch --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.lsf\"\nfi\n\nmkdir -p \"$SCRATCH_DIR\"\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    echo \"Fresh run\"\n    rm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n    mkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nelse\n    echo \"Continuing incomplete run\"\nfi\n\n# Send stdout and stderr to a logfile included with the archive.\nLOG=\"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.$(date +%s).txt\"\nexec >  >(tee -ia \"$LOG\")\nexec 2> >(tee -ia \"$LOG\" >&2)\n\n# Log some environment info\necho \"Invocation: $0 $@\"\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nexport OMP_NUM_THREADS=1\nexport PYTHONUNBUFFERED=1\n\ncp $SCRIPT_DIR/additional_alleles.txt .\n\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    cp $SCRIPT_DIR/generate_hyperparameters.py .\n    python generate_hyperparameters.py > hyperparameters.yaml\nfi\n\ncp $SCRIPT_DIR/reassign_mass_spec_training_data.py .\npython reassign_mass_spec_training_data.py \\\n    \"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\" \\\n    --set-measurement-value 100 \\\n    --out-csv \"$(pwd)/train_data.csv\"\nbzip2 -f \"$(pwd)/train_data.csv\"\nTRAINING_DATA=\"$(pwd)/train_data.csv.bz2\"\n\nfor kind in combined\ndo\n    CONTINUE_INCOMPLETE_ARGS=\"\"\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -d \"models.unselected.${kind}\" ]\n    then\n        echo \"Will continue existing run: $kind\"\n        CONTINUE_INCOMPLETE_ARGS=\"--continue-incomplete\"\n    fi\n\n    ALLELE_SEQUENCES=\"$(mhcflurry-downloads path allele_sequences)/allele_sequences.csv\"\n    HYPERPARAMETERS=\"hyperparameters.yaml\"\n\n    mhcflurry-class1-train-pan-allele-models \\\n        --data \"$TRAINING_DATA\" \\\n        --allele-sequences \"$ALLELE_SEQUENCES\" \\\n        --pretrain-data \"$(mhcflurry-downloads path random_peptide_predictions)/predictions.csv.bz2\" \\\n        --held-out-measurements-per-allele-fraction-and-max 0.25 100 \\\n        --num-folds 4 \\\n        --hyperparameters \"$HYPERPARAMETERS\" \\\n        --out-models-dir $(pwd)/models.unselected.${kind} \\\n        --worker-log-dir \"$SCRATCH_DIR/$DOWNLOAD_NAME\" \\\n        $PARALLELISM_ARGS $CONTINUE_INCOMPLETE_ARGS\ndone\n\necho \"Done training. 
Beginning model selection.\"\n\nfor kind in combined\ndo\n    MODELS_DIR=\"models.unselected.${kind}\"\n\n    # Older method calibrated only particular alleles. We are now calibrating\n    # all alleles, so this is commented out.\n    #ALLELE_LIST=$(bzcat \"$MODELS_DIR/train_data.csv.bz2\" | cut -f 1 -d , | grep -v allele | uniq | sort | uniq)\n    #ALLELE_LIST+=$(echo \" \" $(cat additional_alleles.txt | grep -v '#') )\n\n    mhcflurry-class1-select-pan-allele-models \\\n        --data \"$MODELS_DIR/train_data.csv.bz2\" \\\n        --models-dir \"$MODELS_DIR\" \\\n        --out-models-dir models.${kind} \\\n        --min-models 2 \\\n        --max-models 8 \\\n        $PARALLELISM_ARGS\n    cp \"$MODELS_DIR/train_data.csv.bz2\" \"models.${kind}/train_data.csv.bz2\"\n\n    # We are now calibrating all alleles.\n    # Previously had argument:  --allele $ALLELE_LIST\n    time mhcflurry-calibrate-percentile-ranks \\\n        --models-dir models.${kind} \\\n        --match-amino-acid-distribution-data \"$MODELS_DIR/train_data.csv.bz2\" \\\n        --motif-summary \\\n        --num-peptides-per-length 100000 \\\n        --alleles-per-work-chunk 10 \\\n        --verbosity 1 \\\n        $PARALLELISM_ARGS\n\ndone\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 -f \"$LOG\"\nfor i in $(ls LOG-worker.*.txt) ; do bzip2 -f $i ; done\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n\n# Split into <2GB chunks for GitHub\nPARTS=\"${RESULT}.part.\"\n# Check for pre-existing part files and rename them.\nfor i in $(ls \"${PARTS}\"* )\ndo\n    DEST=\"${i}.OLD.$(date +%s)\"\n    echo \"WARNING: already exists: $i . Moving to $DEST\"\n    mv $i $DEST\ndone\nsplit -b 2000M \"$RESULT\" \"$PARTS\"\necho \"Split into parts:\"\nls -lh \"${PARTS}\"*\n\n# Write out just the selected models\n# Move unselected into a hidden dir so it is excluded in the glob (*).\nmkdir .ignored\nmv models.unselected.* .ignored/\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.selected.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\nmv .ignored/* . && rmdir .ignored\necho \"Created archive: $RESULT\"\n"
  },
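  {
    "path": "downloads-generation/models_class1_pan/example_reassemble_archive.py",
    "content": "\"\"\"\nIllustrative sketch, added in this edited dump (not a file from the original\nrepository): GENERATE.sh splits the result archive into <2GB pieces named\n<archive>.part.aa, .ab, ... via `split`. This reassembles them, equivalent\nto `cat <archive>.part.* > <archive>`. The archive filename below is a\nhypothetical example (the real name carries the generation date).\n\"\"\"\nimport glob\nimport shutil\n\nARCHIVE = \"models_class1_pan.20240101.tar.bz2\"  # hypothetical date stamp\n\nparts = sorted(glob.glob(ARCHIVE + \".part.*\"))\nassert parts, \"no part files found for %s\" % ARCHIVE\n\nwith open(ARCHIVE, \"wb\") as out:\n    for part in parts:\n        # Append each piece in lexicographic (aa, ab, ...) order.\n        with open(part, \"rb\") as fd:\n            shutil.copyfileobj(fd, out)\nprint(\"Reassembled %s from %d parts\" % (ARCHIVE, len(parts)))\n"
  },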
  {
    "path": "downloads-generation/models_class1_pan/README.md",
    "content": "# Class I pan-allele models (ensemble)\n\nThis download contains trained MHC Class I MHCflurry models before model selection.\n\nTo generate this download run:\n\n```\n./GENERATE.sh\n```\n"
  },
  {
    "path": "downloads-generation/models_class1_pan/additional_alleles.txt",
    "content": "# Additional alleles besides those in the training data to include in percentile rank calibration\nHLA-C*02:10\nHLA-A*02:20"
  },
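  {
    "path": "downloads-generation/models_class1_pan/example_read_additional_alleles.py",
    "content": "\"\"\"\nIllustrative sketch, added in this edited dump (not a file from the original\nrepository): read additional_alleles.txt while skipping '#' comment lines,\nmirroring the commented-out `grep -v '#'` usage in GENERATE.sh.\n\"\"\"\nwith open(\"additional_alleles.txt\") as fd:\n    alleles = [\n        line.strip() for line in fd\n        if line.strip() and not line.startswith(\"#\")\n    ]\nprint(alleles)  # ['HLA-C*02:10', 'HLA-A*02:20']\n"
  },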
  {
    "path": "downloads-generation/models_class1_pan/cluster_submit_script_header.mssm_hpc.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf-{work_item_num} # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q gpu # queue\n#BSUB -gpu \"num=1:mode=exclusive_process:mps=no:j_exclusive=yes\"\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 46:00 # walltime in HH:MM\n#BSUB -R rusage[mem=30000] # mb memory requested\n#BSUB -o {work_dir}/%J.stdout # output log (%J : JobID)\n#BSUB -eo {work_dir}/STDERR # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/mhcflurry-conda-environment/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\n#export TF_GPU_ALLOCATOR=cuda_malloc_async\n\nfree -m\n\nmodule add cuda/11.8.0 cudnn/8.9.5-11\nmodule list\n\nnvidia-smi\n\nexport XLA_FLAGS=--xla_gpu_cuda_data_dir=$CUDA_PATH\n#export XLA_FLAGS='--xla_compile=False'\n\npython -c 'import tensorflow as tf ; print(\"GPU AVAILABLE\" if tf.test.is_gpu_available() else \"GPU NOT AVAILABLE\")'\n\nenv\n\ncd {work_dir}\n\n"
  },
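  {
    "path": "downloads-generation/models_class1_pan/example_fill_lsf_template.py",
    "content": "\"\"\"\nIllustrative sketch, added in this edited dump (not a file from the original\nrepository): the .lsf header leaves {work_item_num} and {work_dir}\nplaceholders to be filled per work item. This demonstrates str.format-style\nsubstitution on the template; that mhcflurry's --cluster-script-prefix-path\nmachinery uses exactly this mechanism is an assumption here, so consult the\ncluster parallelism code for the authoritative behavior.\n\"\"\"\nwith open(\"cluster_submit_script_header.mssm_hpc.lsf\") as fd:\n    template = fd.read()\n\n# Instantiate the header for a hypothetical work item.\nprint(template.format(work_item_num=0, work_dir=\"/tmp/mhcflurry-work\"))\n"
  },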
  {
    "path": "downloads-generation/models_class1_pan/generate_hyperparameters.py",
    "content": "\"\"\"\nGenerate grid of hyperparameters\n\"\"\"\n\nfrom sys import stdout\nfrom copy import deepcopy\nfrom yaml import dump\n\nbase_hyperparameters = {\n    'activation': 'tanh',\n    'allele_dense_layer_sizes': [],\n    'batch_normalization': False,\n    'dense_layer_l1_regularization': 0.0,\n    'dense_layer_l2_regularization': 0.0,\n    'dropout_probability': 0.5,\n    'early_stopping': True,\n    'init': 'glorot_uniform',\n    'layer_sizes': [1024, 512],\n    'learning_rate': 0.001,\n    'locally_connected_layers': [],\n    'topology': 'feedfoward',\n    'loss': 'custom:mse_with_inequalities',\n    'max_epochs': 5000,\n    'minibatch_size': 128,\n    'optimizer': 'rmsprop',\n    'output_activation': 'sigmoid',\n    \"patience\": 20,\n    \"min_delta\": 0.0,\n    'peptide_encoding': {\n        'vector_encoding_name': 'BLOSUM62',\n        'alignment_method': 'left_pad_centered_right_pad',\n        'max_length': 15,\n    },\n    'peptide_allele_merge_activation': '',\n    'peptide_allele_merge_method': 'concatenate',\n    'peptide_amino_acid_encoding': 'BLOSUM62',\n    'peptide_dense_layer_sizes': [],\n    'random_negative_affinity_max': 50000.0,\n    'random_negative_affinity_min': 30000.0,\n    'random_negative_constant': 1,\n    'random_negative_distribution_smoothing': 0.0,\n    'random_negative_match_distribution': True,\n    'random_negative_rate': 1.0,\n    'random_negative_method': 'by_allele_equalize_nonbinders',\n    'random_negative_binder_threshold': 500.0,\n    'train_data': {\n        'pretrain': True,\n        'pretrain_peptides_per_epoch': 64,\n        'pretrain_steps_per_epoch': 256,\n        'pretrain_patience': 2,\n        'pretrain_min_delta': 0.0001,\n        'pretrain_max_val_loss': 0.10,\n        'pretrain_max_epochs': 50,\n        'pretrain_min_epochs': 5,\n    },\n    'validation_split': 0.1,\n    'data_dependent_initialization_method': \"lsuv\",\n}\n\ngrid = []\nfor layer_sizes in [[512, 256], [512, 512], [1024, 512], [1024, 1024]]:\n    l1_base = 0.0000001\n    for l1 in [l1_base, l1_base / 10, l1_base / 100, l1_base / 1000, 0.0]:\n        new = deepcopy(base_hyperparameters)\n        new[\"topology\"] = 'feedforward'\n        new[\"layer_sizes\"] = layer_sizes\n        new[\"dense_layer_l1_regularization\"] = l1\n        if not grid or new not in grid:\n            grid.append(new)\n\nfor layer_sizes in [[256, 512], [256, 256, 512], [256, 512, 512]]:\n    l1_base = 0.0000001\n    for l1 in [l1_base, l1_base / 10, l1_base / 100, l1_base / 1000, 0.0]:\n        new = deepcopy(base_hyperparameters)\n        new[\"topology\"] = 'with-skip-connections'\n        new[\"layer_sizes\"] = layer_sizes\n        new[\"dense_layer_l1_regularization\"] = l1\n        if not grid or new not in grid:\n            grid.append(new)\n\ndump(grid, stdout)\n"
  },
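  {
    "path": "downloads-generation/models_class1_pan/examples/inspect_hyperparameters_grid.py",
    "content": "\"\"\"\nIllustrative sketch only (hypothetical file, not part of the original\npipeline): shows how the YAML grid emitted by generate_hyperparameters.py\ncan be loaded and inspected. The filename hyperparameters.yaml is an\nassumption; pipe the generator's stdout wherever you like.\n\"\"\"\nfrom yaml import safe_load\n\nwith open(\"hyperparameters.yaml\") as fd:\n    grid = safe_load(fd)\n\n# 4 feedforward layer-size combos x 5 l1 values, plus 3 skip-connection\n# combos x 5 l1 values: 35 distinct hyperparameter dicts in total.\nprint(\"Grid size:\", len(grid))\nfor config in grid:\n    print(\n        config[\"topology\"],\n        config[\"layer_sizes\"],\n        config[\"dense_layer_l1_regularization\"])\n"
  },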
  {
    "path": "downloads-generation/models_class1_pan/reassign_mass_spec_training_data.py",
    "content": "\"\"\"\nReassign affinity values for mass spec data\n\"\"\"\nimport sys\nimport os\nimport argparse\n\nimport pandas\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\"data\", metavar=\"CSV\", help=\"Training data\")\nparser.add_argument(\"--ms-only\", action=\"store_true\", default=False)\nparser.add_argument(\"--drop-negative-ms\", action=\"store_true\", default=False)\nparser.add_argument(\"--set-measurement-value\", type=float)\nparser.add_argument(\"--out-csv\")\n\npandas.set_option('display.max_columns', 500)\n\n\ndef go(args):\n    df = pandas.read_csv(args.data)\n    print(df)\n\n    if args.drop_negative_ms:\n        bad = df.loc[\n            (df.measurement_kind == \"mass_spec\") &\n            (df.measurement_inequality != \"<\")\n        ]\n        print(\"Dropping \", len(bad))\n        df = df.loc[~df.index.isin(bad.index)].copy()\n\n    if args.ms_only:\n        print(\"Filtering to MS only\")\n        df = df.loc[df.measurement_kind == \"mass_spec\"].copy()\n\n    if args.set_measurement_value is not None:\n        indexer = df.measurement_kind == \"mass_spec\"\n        df.loc[\n            indexer,\n            \"measurement_value\"\n        ] = args.set_measurement_value\n        print(\"Reassigned:\")\n        print(df.loc[indexer])\n\n    if args.out_csv:\n        out_csv = os.path.abspath(args.out_csv)\n        df.to_csv(out_csv, index=False)\n        print(\"Wrote\", out_csv)\n\n\nif __name__ == \"__main__\":\n    go(parser.parse_args(sys.argv[1:]))\n"
  },
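  {
    "path": "downloads-generation/models_class1_pan/examples/reassign_mass_spec_toy_example.py",
    "content": "\"\"\"\nIllustrative sketch only (hypothetical file, not part of the original\npipeline): demonstrates the filtering and reassignment logic of\nreassign_mass_spec_training_data.py on a toy frame. Column names match the\ncurated training data; the rows are made up.\n\"\"\"\nimport pandas\n\ndf = pandas.DataFrame({\n    \"peptide\": [\"SIINFEKL\", \"SIINFEKL\", \"AAAWYLWEV\"],\n    \"measurement_kind\": [\"affinity\", \"mass_spec\", \"mass_spec\"],\n    \"measurement_inequality\": [\"=\", \"<\", \">\"],\n    \"measurement_value\": [25.0, 100.0, 100.0],\n})\n\n# --drop-negative-ms: discard mass spec rows that are not positive (\"<\") calls.\ndf = df.loc[\n    ~((df.measurement_kind == \"mass_spec\") &\n      (df.measurement_inequality != \"<\"))\n].copy()\n\n# --set-measurement-value 100: force the remaining mass spec rows to 100 nM.\ndf.loc[df.measurement_kind == \"mass_spec\", \"measurement_value\"] = 100.0\nprint(df)\n"
  },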
  {
    "path": "downloads-generation/models_class1_pan_variants/GENERATE.WITH_HPC_CLUSTER.sh",
    "content": "bash GENERATE.sh cluster\n"
  },
  {
    "path": "downloads-generation/models_class1_pan_variants/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Uses an HPC cluster (Mount Sinai chimera cluster, which uses lsf job\n# scheduler). This would need to be modified for other sites.\n#\n# Usage: GENERATE.sh <local|cluster> <fresh|continue-incomplete>\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_pan_variants\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nif [ \"$1\" != \"cluster\" ]\nthen\n    GPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\n    echo \"Detected GPUS: $GPUS\"\n\n    PROCESSORS=$(getconf _NPROCESSORS_ONLN)\n    echo \"Detected processors: $PROCESSORS\"\n\n    if [ \"$GPUS\" -eq \"0\" ]; then\n       NUM_JOBS=${NUM_JOBS-1}\n    else\n        NUM_JOBS=${NUM_JOBS-$GPUS}\n    fi\n    echo \"Num jobs: $NUM_JOBS\"\n    PARALLELISM_ARGS+=\" --num-jobs $NUM_JOBS --max-tasks-per-worker 1 --gpus $GPUS --max-workers-per-gpu 1\"\nelse\n    PARALLELISM_ARGS+=\" --cluster-parallelism --cluster-max-retries 3 --cluster-submit-command bsub --cluster-results-workdir $HOME/mhcflurry-scratch --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.gpu.lsf\"\nfi\n\nmkdir -p \"$SCRATCH_DIR\"\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    echo \"Fresh run\"\n    rm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n    mkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nelse\n    echo \"Continuing incomplete run\"\nfi\n\n# Send stdout and stderr to a logfile included with the archive.\nLOG=\"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.$(date +%s).txt\"\nexec >  >(tee -ia \"$LOG\")\nexec 2> >(tee -ia \"$LOG\" >&2)\n\n# Log some environment info\necho \"Invocation: $0 $@\"\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nexport OMP_NUM_THREADS=1\nexport PYTHONUNBUFFERED=1\n\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    cp $SCRIPT_DIR/generate_hyperparameters.production.py .\n    cp $SCRIPT_DIR/generate_hyperparameters.py .\n    cp $SCRIPT_DIR/reassign_mass_spec_training_data.py .\n    cp $SCRIPT_DIR/exclude_data_from_training.py .\n    python generate_hyperparameters.production.py > hyperparameters.production.yaml\n    python generate_hyperparameters.py hyperparameters.production.yaml no_pretrain > hyperparameters.no_pretrain.yaml\n    python generate_hyperparameters.py hyperparameters.no_pretrain.yaml single_hidden > hyperparameters.single_hidden_no_pretrain.yaml\n    python generate_hyperparameters.py hyperparameters.production.yaml compact_peptide > hyperparameters.compact_peptide.yaml\nfi\n\n#VARIANTS=( no_additional_ms_ms_only_0nm ms_only_0nm no_additional_ms_0nm 0nm 500nm no_additional_ms no_pretrain compact_peptide 34mer_sequence single_hidden_no_pretrain affinity_only )\nVARIANTS=( no_additional_ms )\n\n\n\nfor kind in \"${VARIANTS[@]}\"\ndo\n    CONTINUE_INCOMPLETE_ARGS=\"\"\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -d \"models.unselected.${kind}\" ]\n    then\n        echo \"Will continue existing run: $kind\"\n        CONTINUE_INCOMPLETE_ARGS=\"--continue-incomplete\"\n    fi\n\n    ALLELE_SEQUENCES=\"$(mhcflurry-downloads path allele_sequences)/allele_sequences.csv\"\n    HYPERPARAMETERS=hyperparameters.$kind.yaml\n    if [ \"$kind\" == \"34mer_sequence\" ]\n    then\n        ALLELE_SEQUENCES=\"$(mhcflurry-downloads path allele_sequences)/allele_sequences.no_differentiation.csv\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    TRAINING_DATA=\"$(mhcflurry-downloads path 
data_curated)/curated_training_data.csv.bz2\"\n    if [ \"$kind\" == \"no_additional_ms\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.no_additional_ms.csv.bz2\" \\\n            --set-measurement-value 100 \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"no_additional_ms_ms_only_0nm\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.no_additional_ms.csv.bz2\" \\\n            --set-measurement-value 0 \\\n            --drop-negative-ms \\\n            --ms-only \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"no_additional_ms_0nm\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.no_additional_ms.csv.bz2\" \\\n            --set-measurement-value 0 \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"0nm\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\" \\\n            --set-measurement-value 0 \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"500nm\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\" \\\n            --set-measurement-value 500 \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"50nm\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\" \\\n            --set-measurement-value 50 \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"ms_only_0nm\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.mass_spec.csv.bz2\" \\\n            --set-measurement-value 0 \\\n            --drop-negative-ms \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"affinity_only\" ]\n    then\n        TRAINING_DATA=\"$(mhcflurry-downloads path data_curated)/curated_training_data.affinity.csv.bz2\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"exclude_epitopes\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python exclude_data_from_training.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\" \\\n            --remove-filename \"$(mhcflurry-downloads path 
data_published)/epitopes/30377561/2018ONCOIMM0037R-file002.xlsx\" \\\n            --remove-kind 30377561 \\\n            --out \"$TRAINING_DATA\" \\\n            --out-removed \"removed_train_data.$kind.csv\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    if [ \"$kind\" == \"exclude_epitopes_50nm\" ]\n    then\n        TRAINING_DATA=\"train_data.$kind.csv\"\n        python exclude_data_from_training.py \\\n            \"$(mhcflurry-downloads path data_curated)/curated_training_data.csv.bz2\" \\\n            --remove-filename \"$(mhcflurry-downloads path data_published)/epitopes/30377561/2018ONCOIMM0037R-file002.xlsx\" \\\n            --remove-kind 30377561 \\\n            --out \"$TRAINING_DATA\" \\\n            --out-removed \"removed_train_data.$kind.csv\"\n        python reassign_mass_spec_training_data.py \\\n            \"$TRAINING_DATA\" \\\n            --set-measurement-value 50 \\\n            --out-csv \"$TRAINING_DATA\"\n        HYPERPARAMETERS=hyperparameters.production.yaml\n    fi\n\n    mhcflurry-class1-train-pan-allele-models \\\n        --data \"$TRAINING_DATA\" \\\n        --allele-sequences \"$ALLELE_SEQUENCES\" \\\n        --pretrain-data \"$(mhcflurry-downloads path random_peptide_predictions)/predictions.csv.bz2\" \\\n        --held-out-measurements-per-allele-fraction-and-max 0.25 100 \\\n        --num-folds 4 \\\n        --hyperparameters \"$HYPERPARAMETERS\" \\\n        --out-models-dir $(pwd)/models.unselected.${kind} \\\n        --worker-log-dir \"$SCRATCH_DIR/$DOWNLOAD_NAME\" \\\n        $PARALLELISM_ARGS $CONTINUE_INCOMPLETE_ARGS\ndone\n\necho \"Done training. Beginning model selection.\"\n\nfor kind in \"${VARIANTS[@]}\"\ndo\n    MODELS_DIR=\"models.unselected.${kind}\"\n    mhcflurry-class1-select-pan-allele-models \\\n        --data \"$MODELS_DIR/train_data.csv.bz2\" \\\n        --models-dir \"$MODELS_DIR\" \\\n        --out-models-dir models.${kind} \\\n        --min-models 2 \\\n        --max-models 8 \\\n        $PARALLELISM_ARGS\n    cp \"$MODELS_DIR/train_data.csv.bz2\" \"models.${kind}/train_data.csv.bz2\"\ndone\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 -f \"$LOG\"\nfor i in $(ls LOG-worker.*.txt) ; do bzip2 -f $i ; done\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n\n# Split into <2GB chunks for GitHub\nPARTS=\"${RESULT}.part.\"\n# Check for pre-existing part files and rename them.\nfor i in $(ls \"${PARTS}\"* )\ndo\n    DEST=\"${i}.OLD.$(date +%s)\"\n    echo \"WARNING: already exists: $i . Moving to $DEST\"\n    mv $i $DEST\ndone\nsplit -b 2000M \"$RESULT\" \"$PARTS\"\necho \"Split into parts:\"\nls -lh \"${PARTS}\"*\n\n# Write out just the selected models\n# Move unselected into a hidden dir so it is excluded in the glob (*).\nmkdir .ignored\nmv models.unselected.* .ignored/\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.selected.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\nmv .ignored/* . && rmdir .ignored\necho \"Created archive: $RESULT\"\n"
  },
  {
    "path": "downloads-generation/models_class1_pan_variants/cluster_submit_script_header.mssm_hpc.gpu.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf-{work_item_num} # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q gpu # queue\n#BSUB -R rusage[ngpus_excl_p=1]  # 1 exclusive GPU\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 46:00 # walltime in HH:MM\n#BSUB -R rusage[mem=30000] # mb memory requested\n#BSUB -o {work_dir}/%J.stdout # output log (%J : JobID)\n#BSUB -eo {work_dir}/STDERR # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/.conda/envs/py36b/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\nexport NETMHC_BUNDLE_HOME=$HOME/sinai/git/netmhc-bundle\nexport NETMHC_BUNDLE_TMPDIR=/local/JOBS/netmhctmp-{work_item_num}\nexport PATH=$NETMHC_BUNDLE_HOME/bin:$PATH\n\nfree -m\n\nmodule add cuda/10.1.105 cudnn/7.6.5\nmodule list\n\n# python -c 'import tensorflow as tf ; print(\"GPU AVAILABLE\" if tf.test.is_gpu_available() else \"GPU NOT AVAILABLE\")'\n\nenv\n\ncd {work_dir}\n\n"
  },
  {
    "path": "downloads-generation/models_class1_pan_variants/exclude_data_from_training.py",
    "content": "\"\"\"\nExtract allele/peptide pairs to exclude from training data.\n\"\"\"\nimport sys\nimport os\nimport argparse\n\nimport pandas\n\nfrom mhcflurry.common import normalize_allele_name\n\n\ndef normalize_allele_name_or_return_unknown(s):\n    return normalize_allele_name(\n        s, raise_on_error=False, default_value=\"UNKNOWN\")\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\"data\", metavar=\"CSV\", help=\"Training data\")\nparser.add_argument(\n    \"--remove-filename\",\n    action=\"append\",\n    default=[],\n    metavar=\"NAME\",\n    help=\"Data to drop\",\n    required=True)\nparser.add_argument(\n    \"--remove-kind\",\n    action=\"append\",\n    default=[],\n    metavar=\"KIND\",\n    help=\"Format of data to drop. For published data, use the PMID.\",\n    choices=[\n        \"30377561\"  # Koşaloğlu-Yalçın, ..., Peters. Oncoimmunology 2018 [PMID 30377561]\n    ],\n    required=True)\nparser.add_argument(\"--out\", metavar=\"CSV\", help=\"Result data path\")\nparser.add_argument(\n    \"--out-removed\", metavar=\"CSV\", help=\"Write removed data to given path\")\n\n\npandas.set_option('display.max_columns', 500)\n\n\nLOADERS = {}\n\n\ndef load_30377561(filename):\n    # Koşaloğlu-Yalçın, ..., Peters. Oncoimmunology 2018 [PMID 30377561]\n    dfs = pandas.read_excel(filename, sheet_name=None)\n\n    df1 = dfs['Supp Table 5 positive & random']\n\n    result_df = []\n    result_df.append(df1.rename(\n        columns={\n            \"mt.pep\": \"peptide\",\n            \"hla\": \"allele\",\n        })[[\"allele\", \"peptide\"]])\n    result_df.append(df1.rename(\n        columns={\n            \"wt.pep\": \"peptide\",\n            \"hla\": \"allele\",\n        })[[\"allele\", \"peptide\"]])\n\n\n    df2 = dfs[\"Supp Table 4 viral epitopes\"]\n\n    result_df.append(\n        df2.rename(\n            columns={\n                \"Epitope\": \"peptide\", \"Restriction\": \"allele\",\n        })[[\"allele\", \"peptide\"]])\n\n    result_df = pandas.concat(result_df, ignore_index=True)\n    return result_df\n\n\nLOADERS[\"30377561\"] = load_30377561\n\n\ndef go(args):\n    df = pandas.read_csv(args.data)\n    print(\"Read training data of length %d: \" % len(df))\n    print(df)\n\n    df[\"allele_peptide\"] = df.allele + \"~\" + df.peptide\n\n    if len(args.remove_kind) != len(args.remove_filename):\n        parser.error(\n            \"Number of arguments mismatch: --remove-kind [%d] != \"\n            \"--remove-filename [%d]\" % (\n                len(args.remove_kind),\n                len(args.remove_filename)))\n\n    removed = []\n\n    for (i, (kind, path)) in enumerate(\n            zip(args.remove_kind, args.remove_filename)):\n        print(\n            \"Processing file %d / %d: %s %s\" % (\n                i + 1, len(args.remove_kind), kind, path))\n        to_remove = LOADERS[kind](path)\n        print(\"Remove data contains %d entries\" % len(to_remove))\n\n        to_remove[\"normalized_allele\"] = to_remove.allele.map(\n            normalize_allele_name_or_return_unknown)\n\n        remove_allele_peptides = set(\n            to_remove.normalized_allele + \"~\" + to_remove.peptide)\n\n        remove_mask = df.allele_peptide.isin(remove_allele_peptides)\n        print(\"Will remove %d entries.\" % remove_mask.sum())\n\n        removed.append(df.loc[remove_mask].copy())\n        df = df.loc[~remove_mask].copy()\n\n        print(\"New training data size: %d\" % len(df))\n\n    print(\"Done processing.\")\n\n    removed_df = 
pandas.concat(removed)\n    print(\"Removed %d entries in total:\" % len(removed_df))\n    print(removed_df)\n\n    if args.out_removed:\n        removed_df.to_csv(args.out_removed, index=False)\n        print(\"Wrote: \", args.out_removed)\n\n    if args.out:\n        df.to_csv(args.out, index=False)\n        print(\"Wrote: \", args.out)\n\n\nif __name__ == \"__main__\":\n    go(parser.parse_args(sys.argv[1:]))\n"
  },
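  {
    "path": "downloads-generation/models_class1_pan_variants/examples/exclude_pairs_toy_example.py",
    "content": "\"\"\"\nIllustrative sketch only (hypothetical file, not part of the original\npipeline): the allele~peptide key join used by exclude_data_from_training.py,\non made-up data.\n\"\"\"\nimport pandas\n\ntrain = pandas.DataFrame({\n    \"allele\": [\"HLA-A*02:01\", \"HLA-A*02:01\", \"HLA-B*07:02\"],\n    \"peptide\": [\"SIINFEKL\", \"GILGFVFTL\", \"APRGPHGGAASGL\"],\n})\nto_remove = pandas.DataFrame({\n    \"normalized_allele\": [\"HLA-A*02:01\"],\n    \"peptide\": [\"GILGFVFTL\"],\n})\n\n# Pairs are compared via a single \"allele~peptide\" key, exactly as in the script.\ntrain[\"allele_peptide\"] = train.allele + \"~\" + train.peptide\nremove_keys = set(to_remove.normalized_allele + \"~\" + to_remove.peptide)\nmask = train.allele_peptide.isin(remove_keys)\nprint(\"Removing %d of %d entries\" % (mask.sum(), len(train)))\nprint(train.loc[~mask])\n"
  },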
  {
    "path": "downloads-generation/models_class1_pan_variants/generate_hyperparameters.py",
    "content": "\"\"\"\nGenerate grid of hyperparameters\n\"\"\"\n\nfrom sys import stdout, argv\nfrom copy import deepcopy\nfrom yaml import dump, load\nimport argparse\n\nparser = argparse.ArgumentParser(usage=__doc__)\nparser.add_argument(\n    \"production_hyperparameters\",\n    metavar=\"data.json\",\n    help=\"Production (i.e. standard) hyperparameters grid.\")\nparser.add_argument(\n    \"kind\",\n    choices=('single_hidden', 'no_pretrain', 'compact_peptide'),\n    help=\"Hyperameters variant to output\")\n\nargs = parser.parse_args(argv[1:])\n\nwith open(args.production_hyperparameters) as fd:\n    production_hyperparameters_list = load(fd)\n\n\ndef transform_to_single_hidden(hyperparameters):\n    result = []\n    for size in [64, 128, 256, 1024]:\n        new_hyperparameters = deepcopy(hyperparameters)\n        new_hyperparameters['layer_sizes'] = [size]\n        result.append(new_hyperparameters)\n    return result\n\n\ndef transform_to_no_pretrain(hyperparameters):\n    result = deepcopy(hyperparameters)\n    result['train_data']['pretrain'] = False\n    return [result]\n\n\ndef transform_to_compact_peptide(hyperparameters):\n    result = deepcopy(hyperparameters)\n    result['peptide_encoding']['alignment_method'] = 'left_pad_right_pad'\n    return [result]\n\n\nTRANSFORMS={\n    \"single_hidden\": transform_to_single_hidden,\n    \"no_pretrain\": transform_to_no_pretrain,\n    \"compact_peptide\": transform_to_compact_peptide,\n}\n\ntransform = TRANSFORMS[args.kind]\n\nresult_list = []\nfor item in production_hyperparameters_list:\n    results = transform(item)\n    for result_item in results:\n        if result_item not in result_list:\n            result_list.append(result_item)\n\ndump(result_list, stdout)\n"
  },
  {
    "path": "downloads-generation/models_class1_presentation/GENERATE.sh",
    "content": "#!/bin/bash\n#\n#\n# Usage: GENERATE.sh <local|cluster> <fresh|continue-incomplete>\n#\n# cluster mode uses an HPC cluster (Mount Sinai chimera cluster, which uses lsf job\n# scheduler). This would need to be modified for other sites.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_presentation\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    echo \"Fresh run\"\n    rm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n    mkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nelse\n    echo \"Continuing incomplete run\"\nfi\n\n# Send stdout and stderr to a logfile included with the archive.\nLOG=\"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.$(date +%s).txt\"\nexec >  >(tee -ia \"$LOG\")\nexec 2> >(tee -ia \"$LOG\" >&2)\n\n# Log some environment info\necho \"Invocation: $0 $@\"\ndate\npip freeze\ngit status\nmhcflurry-downloads info\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nexport OMP_NUM_THREADS=1\nexport PYTHONUNBUFFERED=1\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"train_data.csv.bz2\" ]\nthen\n    echo \"Reusing existing training data\"\nelse\n    cp $SCRIPT_DIR/make_train_data.py .\n    time python make_train_data.py \\\n        --hits \"$(mhcflurry-downloads path models_class1_processing)/hits_with_tpm.csv.bz2\" \\\n        --proteome-peptides \"$(mhcflurry-downloads path models_class1_processing)/proteome_peptides.csv.bz2\" \\\n        --decoys-per-hit 2 \\\n        --exclude-pmid 31844290 31495665 31154438 \\\n        --only-format MULTIALLELIC \\\n        --sample-fraction 0.1 \\\n        --out \"$(pwd)/train_data.csv\"\n    bzip2 -f train_data.csv\nfi\n\nrm -rf commands\nmkdir commands\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"models/weights.csv\" ]\nthen\n    echo \"Reusing existing trained predictor\"\nelse\n    echo time mhcflurry-class1-train-presentation-models \\\n        --data \"$(pwd)/train_data.csv.bz2\" \\\n        --affinity-predictor \\\"\"$(mhcflurry-downloads path models_class1_pan)/models.combined\"\\\" \\\n        --processing-predictor-with-flanks \\\"\"$(mhcflurry-downloads path models_class1_processing)/models.selected.short_flanks\"\\\" \\\n        --processing-predictor-without-flanks \\\"\"$(mhcflurry-downloads path models_class1_processing)/models.selected.no_flank\"\\\" \\\n        --out-models-dir \"$(pwd)/models\" >> commands/train.sh\nfi\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"models/percent_ranks.csv\" ]\nthen\n    echo \"Reusing existing percentile ranks\"\nelse\n    echo time mhcflurry-calibrate-percentile-ranks \\\n        --models-dir \"$(pwd)/models\" \\\n        --match-amino-acid-distribution-data \\\"\"$(mhcflurry-downloads path models_class1_pan)/models.combined/train_data.csv.bz2\"\\\" \\\n        --alleles-file \\\"\"$(mhcflurry-downloads path models_class1_pan)/models.combined/train_data.csv.bz2\"\\\" \\\n        --predictor-kind class1_presentation \\\n        --num-peptides-per-length 10000 \\\n        --alleles-per-genotype 1 \\\n        --num-genotypes 50 \\\n        --verbosity 1 >> commands/train.sh\nfi\n\nls -lh commands\n\nif [ \"$1\" != \"cluster\" ]\nthen\n    echo \"Running locally\"\n    for i in $(ls commands/*.sh)\n    do\n        echo \"# *******\"\n        echo \"# Command $i\"\n        cat $i\n        bash $i\n    done\nelse\n    echo \"Running on cluster\"\n    for i 
in $(ls commands/*.sh)\n    do\n        echo \"# *******\"\n        echo \"# Command $i\"\n        cat $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.lsf > ${i}.lsf\n        echo cd \"$(pwd)\" >> ${i}.lsf\n        cat $i >> ${i}.lsf\n        cat ${i}.lsf\n        bsub -K < \"${i}.lsf\" &\n    done\n    wait\nfi\n\nfor i in $(ls commands/*.sh)\ndo\n    mv \"$i\" \"${i}.FINISHED\"\ndone\n\ncp \"$(mhcflurry-downloads path models_class1_pan)/models.combined/train_data.csv.bz2\" models/affinity_predictor_train_data.csv.bz2\ncp \"$(mhcflurry-downloads path models_class1_processing)/models.selected.with_flanks/train_data.csv.bz2\" models/processing_predictor_train_data.csv.bz2\ncp \"$(mhcflurry-downloads path models_class1_processing)/models.selected.no_flank/train_data.csv.bz2\" models/processing_predictor_no_flank_train_data.csv.bz2\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 -f \"$LOG\"\nfor i in $(ls LOG-worker.*.txt) ; do bzip2 -f $i ; done\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n"
  },
  {
    "path": "downloads-generation/models_class1_presentation/cluster_submit_script_header.mssm_hpc.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q premium # queue\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 40:00 # walltime in HH:MM\n#BSUB -R rusage[mem=45000] # mb memory requested\n#BSUB -o %J.stdout # output log (%J : JobID)\n#BSUB -eo %J.stderr # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/mhcflurry-conda-environment/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\n\nfree -m\nmodule list\nenv\n\n"
  },
  {
    "path": "downloads-generation/models_class1_presentation/make_train_data.py",
    "content": "\"\"\"\nMake training data by selecting decoys, etc.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport numpy\nimport collections\n\nimport pandas\nimport tqdm\n\nimport mhcflurry\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--hits\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Multiallelic mass spec\")\nparser.add_argument(\n    \"--proteome-peptides\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Proteome peptides\")\nparser.add_argument(\n    \"--decoys-per-hit\",\n    type=float,\n    metavar=\"N\",\n    default=99,\n    help=\"Decoys per hit\")\nparser.add_argument(\n    \"--exclude-pmid\",\n    nargs=\"+\",\n    default=[],\n    help=\"Exclude given PMID\")\nparser.add_argument(\n    \"--only-pmid\",\n    nargs=\"*\",\n    default=[],\n    help=\"Include only the given PMID\")\nparser.add_argument(\n    \"--exclude-train-data\",\n    nargs=\"+\",\n    default=[],\n    help=\"Remove hits and decoys included in the given training data\")\nparser.add_argument(\n    \"--only-format\",\n    choices=(\"MONOALLELIC\", \"MULTIALLELIC\"),\n    help=\"Include only data of the given format\")\nparser.add_argument(\n    \"--sample-fraction\",\n    type=float,\n    help=\"Subsample data by specified fraction (e.g. 0.1)\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"File to write\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    hit_df = pandas.read_csv(args.hits)\n    hit_df[\"pmid\"] = hit_df[\"pmid\"].astype(str)\n    original_samples_pmids = hit_df.pmid.unique()\n    numpy.testing.assert_equal(hit_df.hit_id.nunique(), len(hit_df))\n    hit_df = hit_df.loc[\n        (hit_df.mhc_class == \"I\") &\n        (hit_df.peptide.str.len() <= 11) &\n        (hit_df.peptide.str.len() >= 8) &\n        (~hit_df.protein_ensembl.isnull()) &\n        (hit_df.peptide.str.match(\"^[%s]+$\" % \"\".join(\n            mhcflurry.amino_acid.COMMON_AMINO_ACIDS)))\n    ]\n    hit_df['alleles'] = hit_df.hla.str.split().map(tuple)\n    print(\"Loaded hits from %d samples\" % hit_df.sample_id.nunique())\n    if args.only_format:\n        hit_df = hit_df.loc[hit_df.format == args.only_format].copy()\n        print(\"Subselected to %d %s samples\" % (\n            hit_df.sample_id.nunique(), args.only_format))\n\n    if args.only_pmid or args.exclude_pmid:\n        assert not (args.only_pmid and args.exclude_pmid)\n\n        pmids = list(args.only_pmid) + list(args.exclude_pmid)\n        missing = [pmid for pmid in pmids if pmid not in original_samples_pmids]\n        assert not missing, (missing, original_samples_pmids)\n\n        mask = hit_df.pmid.isin(pmids)\n        if args.exclude_pmid:\n            mask = ~mask\n\n        new_hit_df = hit_df.loc[mask]\n        print(\n            \"Selecting by pmids\",\n            pmids,\n            \"reduced dataset from\",\n            len(hit_df),\n            \"to\",\n            len(new_hit_df))\n        hit_df = new_hit_df.copy()\n        print(\"Subselected by pmid to %d samples\" % hit_df.sample_id.nunique())\n\n    allele_to_excluded_peptides = collections.defaultdict(set)\n    for train_dataset in args.exclude_train_data:\n        if not train_dataset:\n            continue\n        print(\"Excluding hits from\", train_dataset)\n        train_df = pandas.read_csv(train_dataset)\n        for (allele, peptides) in train_df.groupby(\"allele\").peptide.unique().iteritems():\n            
allele_to_excluded_peptides[allele].update(peptides)\n        train_counts = train_df.groupby(\n            [\"allele\", \"peptide\"]).measurement_value.count().to_dict()\n        hit_no_train = hit_df.loc[\n            [\n                not any([\n                    train_counts.get((allele, row.peptide))\n                    for allele in row.alleles\n                ])\n            for _, row in tqdm.tqdm(hit_df.iterrows(), total=len(hit_df))]\n        ]\n        print(\n            \"Excluding hits from\",\n            train_dataset,\n            \"reduced dataset from\",\n            len(hit_df),\n            \"to\",\n            len(hit_no_train))\n        hit_df = hit_no_train\n\n    sample_table = hit_df.drop_duplicates(\"sample_id\").set_index(\"sample_id\")\n    grouped = hit_df.groupby(\"sample_id\").nunique()\n    for col in sample_table.columns:\n        if (grouped[col] > 1).any():\n            del sample_table[col]\n\n    print(\"Loading proteome peptides\")\n    all_peptides_df = pandas.read_csv(args.proteome_peptides)\n    print(\"Loaded: \", all_peptides_df.shape)\n\n    all_peptides_df = all_peptides_df.loc[\n        all_peptides_df.protein_accession.isin(hit_df.protein_accession.unique()) &\n        all_peptides_df.peptide.str.match(\"^[%s]+$\" % \"\".join(\n            mhcflurry.amino_acid.COMMON_AMINO_ACIDS))\n    ].copy()\n    all_peptides_df[\"length\"] = all_peptides_df.peptide.str.len()\n    print(\"Subselected proteome peptides by accession: \", all_peptides_df.shape)\n\n    all_peptides_by_length = dict(iter(all_peptides_df.groupby(\"length\")))\n\n    print(\"Selecting decoys.\")\n\n    lengths = [8, 9, 10, 11]\n    result_df = []\n\n    for sample_id, sub_df in tqdm.tqdm(\n            hit_df.groupby(\"sample_id\"), total=hit_df.sample_id.nunique()):\n        result_df.append(\n            sub_df[[\n                \"protein_accession\",\n                \"peptide\",\n                \"sample_id\",\n                \"n_flank\",\n                \"c_flank\",\n            ]].copy())\n        result_df[-1][\"hit\"] = 1\n\n        excluded_peptides = set()\n        for allele in sample_table.loc[sample_id].alleles:\n            excluded_peptides.update(allele_to_excluded_peptides[allele])\n        print(\n            sample_id,\n            \"will exclude\",\n            len(excluded_peptides),\n            \"peptides from decoy universe\")\n\n        for length in lengths:\n            universe = all_peptides_by_length[length]\n            possible_universe = universe.loc[\n                (~universe.peptide.isin(sub_df.peptide.unique())) &\n                (~universe.peptide.isin(excluded_peptides)) &\n                (universe.protein_accession.isin(sub_df.protein_accession.unique()))\n            ]\n            selected_decoys = possible_universe.sample(\n                n=int(len(sub_df) * args.decoys_per_hit / len(lengths)))\n\n            result_df.append(selected_decoys[\n                [\"protein_accession\", \"peptide\", \"n_flank\", \"c_flank\"]\n            ].copy())\n            result_df[-1][\"hit\"] = 0\n            result_df[-1][\"sample_id\"] = sample_id\n\n    result_df = pandas.concat(result_df, ignore_index=True, sort=False)\n    result_df[\"hla\"] = result_df.sample_id.map(sample_table.hla)\n\n    print(result_df)\n    print(\"Counts:\")\n    print(result_df.groupby([\"sample_id\", \"hit\"]).peptide.nunique())\n\n    print(\"Hit counts:\")\n    print(\n        result_df.loc[\n            result_df.hit == 1\n        
].groupby(\"sample_id\").hit.count().sort_values())\n\n    print(\"Hit rates:\")\n    print(result_df.groupby(\"sample_id\").hit.mean().sort_values())\n\n    if args.sample_fraction:\n        print(\"Subsampling to \", args.sample_fraction)\n        result_df = result_df.sample(frac=args.sample_fraction)\n        print(\"Subsampled:\")\n        print(result_df)\n        print(\"Hit rates:\")\n        print(result_df.groupby(\"sample_id\").hit.mean().sort_values())\n\n    result_df.to_csv(args.out, index=False)\n    print(\"Wrote: \", args.out)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
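  {
    "path": "downloads-generation/models_class1_presentation/examples/decoy_sampling_toy_example.py",
    "content": "\"\"\"\nIllustrative sketch only (hypothetical file, not part of the original\npipeline): the per-length decoy sampling scheme from make_train_data.py.\nDecoys are drawn evenly across lengths 8-11 from non-hit peptides, so about\nlen(hits) * decoys_per_hit decoys are selected in total. Data is fabricated.\n\"\"\"\nimport pandas\n\nlengths = [8, 9, 10, 11]\ndecoys_per_hit = 2\nhits = pandas.DataFrame({\"peptide\": [\"SIINFEKL\", \"GILGFVFTL\"]})\nuniverse = pandas.DataFrame({\n    \"peptide\": [\"AAAAAAAA\", \"CCCCCCCC\", \"DDDDDDDDD\", \"EEEEEEEEE\",\n                \"FFFFFFFFFF\", \"GGGGGGGGGG\", \"HHHHHHHHHHH\", \"KKKKKKKKKKK\"],\n})\nuniverse[\"length\"] = universe.peptide.str.len()\n\ndecoys = []\nfor length in lengths:\n    candidates = universe.loc[\n        (universe.length == length) &\n        (~universe.peptide.isin(hits.peptide))\n    ]\n    # n = hits * decoys_per_hit / number of lengths, as in the real script.\n    decoys.append(candidates.sample(\n        n=int(len(hits) * decoys_per_hit / len(lengths))))\nprint(pandas.concat(decoys, ignore_index=True))\n"
  },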
  {
    "path": "downloads-generation/models_class1_processing/GENERATE.WITH_HPC_CLUSTER.sh",
    "content": "bash GENERATE.sh cluster\n"
  },
  {
    "path": "downloads-generation/models_class1_processing/GENERATE.sh",
    "content": "#!/bin/bash\n#\n#\n# Usage: GENERATE.sh <local|cluster> <fresh|continue-incomplete>\n#\n# cluster mode uses an HPC cluster (Mount Sinai chimera cluster, which uses lsf job\n# scheduler). This would need to be modified for other sites.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_processing\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nif [ \"$1\" != \"cluster\" ]\nthen\n    GPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\n    echo \"Detected GPUS: $GPUS\"\n\n    PROCESSORS=$(getconf _NPROCESSORS_ONLN)\n    echo \"Detected processors: $PROCESSORS\"\n\n    if [ \"$GPUS\" -eq \"0\" ]; then\n       NUM_JOBS=${NUM_JOBS-1}\n    else\n        NUM_JOBS=${NUM_JOBS-$GPUS}\n    fi\n    echo \"Num jobs: $NUM_JOBS\"\n    PARALLELISM_ARGS+=\" --num-jobs $NUM_JOBS --max-tasks-per-worker 1 --gpus $GPUS --max-workers-per-gpu 1\"\nelse\n    PARALLELISM_ARGS+=\" --cluster-parallelism --cluster-max-retries 3 --cluster-submit-command bsub --cluster-results-workdir $HOME/mhcflurry-scratch --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.lsf\"\nfi\n\nmkdir -p \"$SCRATCH_DIR\"\nif [ \"$2\" != \"continue-incomplete\" ]\nthen\n    echo \"Fresh run\"\n    rm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n    mkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nelse\n    echo \"Continuing incomplete run\"\nfi\n\n# Send stdout and stderr to a logfile included with the archive.\nLOG=\"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.$(date +%s).txt\"\nexec >  >(tee -ia \"$LOG\")\nexec 2> >(tee -ia \"$LOG\" >&2)\n\n# Log some environment info\necho \"Invocation: $0 $@\"\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nexport OMP_NUM_THREADS=1\nexport PYTHONUNBUFFERED=1\n\nVARIANTS=( with_flanks no_flank short_flanks )\n#VARIANTS=( with_flanks no_flank )\n\ncp $SCRIPT_DIR/generate_hyperparameters.variants.py .\ncp $SCRIPT_DIR/generate_hyperparameters.base.py .\npython generate_hyperparameters.base.py > hyperparameters.base.yaml\nfor kind in \"${VARIANTS[@]}\"\ndo\n    python generate_hyperparameters.variants.py hyperparameters.base.yaml $kind > hyperparameters.$kind.yaml\ndone\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"hits_with_tpm.csv.bz2\" ]\nthen\n    echo \"Reusing existing expression-annotated hits data\"\nelse\n    cp $SCRIPT_DIR/annotate_hits_with_expression.py .\n    time python annotate_hits_with_expression.py \\\n        --hits \"$(mhcflurry-downloads path data_mass_spec_annotated)/annotated_ms.csv.bz2\" \\\n        --expression \"$(mhcflurry-downloads path data_curated)/rna_expression.csv.bz2\" \\\n        --out \"$(pwd)/hits_with_tpm.csv\"\n    bzip2 -f hits_with_tpm.csv\nfi\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"proteome_peptides.csv.bz2\" ]\nthen\n    echo \"Reusing existing proteome peptide list\"\nelse\n    cp $SCRIPT_DIR/write_proteome_peptides.py .\n    time python write_proteome_peptides.py \\\n        \"$(mhcflurry-downloads path data_mass_spec_annotated)/annotated_ms.csv.bz2\" \\\n        \"$(mhcflurry-downloads path data_references)/uniprot_proteins.csv.bz2\" \\\n        --out \"$(pwd)/proteome_peptides.csv\"\n    bzip2 -f proteome_peptides.csv\nfi\n\n\nif [ \"$2\" == \"continue-incomplete\" ] && [ -f \"train_data.csv.bz2\" ]\nthen\n    echo \"Reusing existing training data\"\nelse\n    cp $SCRIPT_DIR/make_train_data.py .\n    echo \"Using affinity predictor:\"\n    
cat \"$(mhcflurry-downloads path models_class1_pan)/models.combined/info.txt\"\n\n    time python \"$(pwd)/make_train_data.py\" \\\n        --hits \"$(pwd)/hits_with_tpm.csv.bz2\" \\\n        --affinity-predictor \"$(mhcflurry-downloads path models_class1_pan)/models.combined\" \\\n        --proteome-peptides \"$(pwd)/proteome_peptides.csv.bz2\" \\\n        --ppv-multiplier 100 \\\n        --hit-multiplier-to-take 2 \\\n        --out \"$(pwd)/train_data.csv\" \\\n        $PARALLELISM_ARGS\n    bzip2 -f \"$(pwd)/train_data.csv\"\nfi\n\n\nTRAIN_DATA=\"$(pwd)/train_data.csv.bz2\"\n\nfor kind in \"${VARIANTS[@]}\"\ndo\n    CONTINUE_INCOMPLETE_ARGS=\"\"\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -d \"models.unselected.$kind\" ]\n    then\n        echo \"Will continue existing run: $kind\"\n        CONTINUE_INCOMPLETE_ARGS=\"--continue-incomplete\"\n    fi\n\n    mhcflurry-class1-train-processing-models \\\n        --data \"$TRAIN_DATA\" \\\n        --held-out-samples 10 \\\n        --num-folds 4 \\\n        --hyperparameters \"$(pwd)/hyperparameters.$kind.yaml\" \\\n        --out-models-dir \"$(pwd)/models.unselected.$kind\" \\\n        --worker-log-dir \"$SCRATCH_DIR/$DOWNLOAD_NAME\" \\\n        $PARALLELISM_ARGS $CONTINUE_INCOMPLETE_ARGS\ndone\n\necho \"Done training. Beginning model selection.\"\n\nfor kind in \"${VARIANTS[@]}\"\ndo\n    if [ \"$2\" == \"continue-incomplete\" ] && [ -f \"models.selected.$kind/train_data.csv.bz2\" ]\n    then\n        echo \"Reusing existing selected models for $kind\"\n    else\n        MODELS_DIR=\"$(pwd)/models.unselected.$kind\"\n        mhcflurry-class1-select-processing-models \\\n            --data \"$MODELS_DIR/train_data.csv.bz2\" \\\n            --models-dir \"$MODELS_DIR\" \\\n            --out-models-dir \"$(pwd)/models.selected.$kind\" \\\n            --min-models 1 \\\n            --max-models 2 \\\n            $PARALLELISM_ARGS\n        cp \"$MODELS_DIR/train_data.csv.bz2\" \"models.selected.$kind/train_data.csv.bz2\"\n    fi\ndone\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 -f \"$LOG\"\nfor i in $(ls LOG-worker.*.txt) ; do bzip2 -f $i ; done\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\necho \"Created archive: $RESULT\"\n\n# Split into <2GB chunks for GitHub\nPARTS=\"${RESULT}.part.\"\n# Check for pre-existing part files and rename them.\nfor i in $(ls \"${PARTS}\"* )\ndo\n    DEST=\"${i}.OLD.$(date +%s)\"\n    echo \"WARNING: already exists: $i . Moving to $DEST\"\n    mv $i $DEST\ndone\nsplit -b 2000M \"$RESULT\" \"$PARTS\"\necho \"Split into parts:\"\nls -lh \"${PARTS}\"*\n\n# Write out just the selected models\n# Move unselected into a hidden dir so it is excluded in the glob (*).\nmkdir .ignored\nmv models.unselected.* .ignored/\nRESULT=\"$SCRATCH_DIR/${DOWNLOAD_NAME}.selected.$(date +%Y%m%d).tar.bz2\"\ntar -cjf \"$RESULT\" *\nmv .ignored/* . && rmdir .ignored\necho \"Created archive: $RESULT\"\n"
  },
  {
    "path": "downloads-generation/models_class1_processing/annotate_hits_with_expression.py",
    "content": "\"\"\"\nAnnotate hits with expression (tpm), and roll up to just the highest-expressed\ngene for each peptide.\n\"\"\"\nimport sys\nimport argparse\nimport os\n\n\nimport pandas\nimport tqdm\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--hits\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Multiallelic mass spec\")\nparser.add_argument(\n    \"--expression\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Expression data\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"File to write\")\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    args.out = os.path.abspath(args.out)\n\n    hit_df = pandas.read_csv(args.hits)\n    hit_df = hit_df.loc[\n        (~hit_df.protein_ensembl.isnull())\n    ]\n    print(\"Loaded hits from %d samples\" % hit_df.sample_id.nunique())\n    expression_df = pandas.read_csv(args.expression, index_col=0).fillna(0)\n\n    # Add a column to hit_df giving expression value for that sample and that gene\n    print(\"Annotating expression.\")\n    hit_df[\"tpm\"] = [\n        expression_df.reindex(\n            row.protein_ensembl.split())[row.expression_dataset].sum()\n        for _, row in tqdm.tqdm(\n            hit_df.iterrows(), total=len(hit_df), ascii=True, maxinterval=10000)\n    ]\n\n    # Discard hits except those that have max expression for each hit_id\n    print(\"Selecting max-expression transcripts for each hit.\")\n    max_gene_hit_df = hit_df.loc[\n        hit_df.tpm == hit_df.hit_id.map(hit_df.groupby(\"hit_id\").tpm.max())\n    ].sample(frac=1.0).drop_duplicates(\"hit_id\")\n\n    max_gene_hit_df.to_csv(args.out, index=False)\n    print(\"Wrote\", args.out)\n\nif __name__ == '__main__':\n    run()\n"
  },
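  {
    "path": "downloads-generation/models_class1_processing/examples/expression_rollup_toy_example.py",
    "content": "\"\"\"\nIllustrative sketch only (hypothetical file, not part of the original\npipeline): the keep-the-highest-expressed-gene-per-hit rollup from\nannotate_hits_with_expression.py, on a toy frame. The shuffle\n(sample(frac=1.0)) breaks ties arbitrarily before deduplication.\n\"\"\"\nimport pandas\n\nhits = pandas.DataFrame({\n    \"hit_id\": [1, 1, 2],\n    \"protein_ensembl\": [\"ENSG_A\", \"ENSG_B\", \"ENSG_C\"],\n    \"tpm\": [5.0, 80.0, 12.0],\n})\n\n# Keep only rows matching each hit's max tpm, then one row per hit_id.\nmax_per_hit = hits.hit_id.map(hits.groupby(\"hit_id\").tpm.max())\nrolled_up = hits.loc[hits.tpm == max_per_hit].sample(\n    frac=1.0).drop_duplicates(\"hit_id\")\nprint(rolled_up)  # hit 1 keeps ENSG_B (tpm 80), hit 2 keeps ENSG_C\n"
  },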
  {
    "path": "downloads-generation/models_class1_processing/cluster_submit_script_header.mssm_hpc.lsf",
    "content": "#!/bin/bash\n#BSUB -J MHCf-{work_item_num} # Job name\n#BSUB -P acc_nkcancer # allocation account or Unix group\n#BSUB -q gpu # queue\n#BSUB -gpu \"num=1:mode=exclusive_process:mps=no:j_exclusive=yes\"\n#BSUB -R span[hosts=1] # one node\n#BSUB -n 1 # number of compute cores\n#BSUB -W 46:00 # walltime in HH:MM\n#BSUB -R rusage[mem=30000] # mb memory requested\n#BSUB -o {work_dir}/%J.stdout # output log (%J : JobID)\n#BSUB -eo {work_dir}/STDERR # error log\n#BSUB -L /bin/bash # Initialize the execution environment\n#\n\nset -e\nset -x\n\necho \"Subsequent stderr output redirected to stdout\" >&2\nexec 2>&1\n\nexport TMPDIR=/local/JOBS/mhcflurry-{work_item_num}\nexport PATH=$HOME/mhcflurry-conda-environment/bin/:$PATH\nexport PYTHONUNBUFFERED=1\nexport KMP_SETTINGS=1\n#export TF_GPU_ALLOCATOR=cuda_malloc_async\n\nfree -m\n\nmodule add cuda/11.8.0 cudnn/8.9.5-11\nmodule list\n\nnvidia-smi\n\nexport XLA_FLAGS=--xla_gpu_cuda_data_dir=$CUDA_PATH\n#export XLA_FLAGS='--xla_compile=False'\n\npython -c 'import tensorflow as tf ; print(\"GPU AVAILABLE\" if tf.test.is_gpu_available() else \"GPU NOT AVAILABLE\")'\n\nenv\n\ncd {work_dir}\n\n"
  },
  {
    "path": "downloads-generation/models_class1_processing/generate_hyperparameters.base.py",
    "content": "\"\"\"\nGenerate grid of hyperparameters\n\"\"\"\nfrom __future__ import print_function\nfrom sys import stdout, stderr\nfrom copy import deepcopy\nfrom yaml import dump\n\nbase_hyperparameters = dict(\n    convolutional_filters=64,\n    convolutional_kernel_size=8,\n    convolutional_kernel_l1_l2=(0.00, 0.0),\n    flanking_averages=True,\n    n_flank_length=15,\n    c_flank_length=15,\n    post_convolutional_dense_layer_sizes=[],\n    minibatch_size=512,\n    dropout_rate=0.5,\n    convolutional_activation=\"relu\",\n    patience=20,\n    learning_rate=0.001)\n\ngrid = []\n\n\ndef hyperparrameters_grid():\n    for learning_rate in [0.001]:\n        for convolutional_activation in [\"tanh\", \"relu\"]:\n            for convolutional_filters in [256, 512]:\n                for flanking_averages in [True]:\n                    for convolutional_kernel_size in [11, 13, 15, 17]:\n                        for l1 in [0.0, 1e-6]:\n                            for s in [[8], [16]]:\n                                for d in [0.3, 0.5]:\n                                    new = deepcopy(base_hyperparameters)\n                                    new[\"learning_rate\"] = learning_rate\n                                    new[\"convolutional_activation\"] = convolutional_activation\n                                    new[\"convolutional_filters\"] = convolutional_filters\n                                    new[\"flanking_averages\"] = flanking_averages\n                                    new[\"convolutional_kernel_size\"] = convolutional_kernel_size\n                                    new[\"convolutional_kernel_l1_l2\"] = (l1, 0.0)\n                                    new[\"post_convolutional_dense_layer_sizes\"] = s\n                                    new[\"dropout_rate\"] = d\n                                    yield new\n\n\nfor new in hyperparrameters_grid():\n    if new not in grid:\n        grid.append(new)\n\nprint(\"Hyperparameters grid size: %d\" % len(grid), file=stderr)\ndump(grid, stdout)\n"
  },
  {
    "path": "downloads-generation/models_class1_processing/generate_hyperparameters.variants.py",
    "content": "\"\"\"\nGenerate grid of hyperparameters\n\"\"\"\n\nfrom sys import stdout, argv\nfrom copy import deepcopy\nfrom yaml import dump, unsafe_load\nimport argparse\n\nparser = argparse.ArgumentParser(usage=__doc__)\nparser.add_argument(\n    \"production_hyperparameters\",\n    metavar=\"data.json\",\n    help=\"Production (i.e. standard) hyperparameters grid.\")\nparser.add_argument(\n    \"kind\",\n    choices=('with_flanks', 'no_n_flank', 'no_c_flank', 'no_flank', 'short_flanks'),\n    help=\"Hyperameters variant to output\")\n\nargs = parser.parse_args(argv[1:])\n\nwith open(args.production_hyperparameters) as fd:\n    production_hyperparameters_list = unsafe_load(fd)\n\n\ndef transform(kind, hyperparameters):\n    new_hyperparameters = deepcopy(hyperparameters)\n    if kind == \"no_n_flank\" or kind == \"no_flank\":\n        new_hyperparameters[\"n_flank_length\"] = 0\n    if kind == \"no_c_flank\" or kind == \"no_flank\":\n        new_hyperparameters[\"c_flank_length\"] = 0\n    if kind == \"short_flanks\":\n        new_hyperparameters[\"c_flank_length\"] = 5\n        new_hyperparameters[\"n_flank_length\"] = 5\n    return [new_hyperparameters]\n\n\nresult_list = []\nfor item in production_hyperparameters_list:\n    results = transform(args.kind, item)\n    for result_item in results:\n        if result_item not in result_list:\n            result_list.append(result_item)\n\ndump(result_list, stdout)\n"
  },
  {
    "path": "downloads-generation/models_class1_processing/make_train_data.py",
    "content": "\"\"\"\nMake training data by selecting decoys, etc.\n\"\"\"\nimport sys\nimport argparse\nimport os\nimport numpy\nimport time\nfrom functools import partial\n\nimport pandas\nimport tqdm\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\nfrom mhcflurry.common import configure_logging\nfrom mhcflurry.local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom mhcflurry.cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--hits\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Multiallelic mass spec\")\nparser.add_argument(\n    \"--affinity-predictor\",\n    required=True,\n    metavar=\"CSV\",\n    help=\"Class 1 affinity predictor to use\")\nparser.add_argument(\n    \"--proteome-peptides\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"Proteome peptides\")\nparser.add_argument(\n    \"--hit-multiplier-to-take\",\n    type=float,\n    default=1,\n    help=\"\")\nparser.add_argument(\n    \"--ppv-multiplier\",\n    type=int,\n    metavar=\"N\",\n    default=1000,\n    help=\"Take top 1/N predictions.\")\nparser.add_argument(\n    \"--exclude-contig\",\n    help=\"Exclude entries annotated to the given contig\")\nparser.add_argument(\n    \"--out\",\n    metavar=\"CSV\",\n    required=True,\n    help=\"File to write\")\nparser.add_argument(\n    \"--alleles\",\n    nargs=\"+\",\n    help=\"Include only the specified alleles\")\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\ndef do_process_samples(samples, constant_data=None):\n    import mhcflurry\n    import pandas\n    import tqdm\n    tqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n    columns_to_keep = [\n        \"hit_id\",\n        \"protein_accession\",\n        \"n_flank\",\n        \"c_flank\",\n        \"peptide\",\n        \"sample_id\",\n        \"affinity_prediction\",\n        \"hit\",\n    ]\n\n    if constant_data is None:\n        constant_data = GLOBAL_DATA\n\n    args = constant_data['args']\n    lengths = constant_data['lengths']\n    all_peptides_by_length = constant_data['all_peptides_by_length']\n    sample_table = constant_data['sample_table']\n\n    hit_df = constant_data['hit_df']\n    hit_df = hit_df.loc[\n        hit_df.sample_id.isin(samples)\n    ]\n\n    affinity_predictor = mhcflurry.Class1AffinityPredictor.load(\n        args.affinity_predictor)\n    print(\"Loaded\", affinity_predictor)\n\n    result_df = []\n    for sample_id, sub_hit_df in tqdm.tqdm(\n            hit_df.groupby(\"sample_id\"), total=hit_df.sample_id.nunique()):\n\n        sub_hit_df = sub_hit_df.copy()\n        sub_hit_df[\"hit\"] = 1\n\n        decoys_df = []\n        for length in lengths:\n            universe = all_peptides_by_length[length]\n            decoys_df.append(\n                universe.loc[\n                    (~universe.peptide.isin(sub_hit_df.peptide.unique())) &\n                    
(universe.protein_accession.isin(sub_hit_df.protein_accession.unique()))\n                ].sample(\n                    n=int(len(sub_hit_df) * args.ppv_multiplier / len(lengths)))[[\n                        \"protein_accession\", \"peptide\", \"n_flank\", \"c_flank\"\n                ]].drop_duplicates(\"peptide\"))\n\n        merged_df = pandas.concat(\n            [sub_hit_df] + decoys_df, ignore_index=True, sort=False)\n\n        prediction_col = \"%s affinity\" % sample_table.loc[sample_id].hla\n        predictions_df = pandas.DataFrame(\n            index=merged_df.peptide.unique(),\n            columns=[prediction_col])\n\n        predictions_df[prediction_col] = affinity_predictor.predict(\n            predictions_df.index,\n            allele=sample_table.loc[sample_id].hla)\n\n        merged_df[\"affinity_prediction\"] = merged_df.peptide.map(\n            predictions_df[prediction_col])\n        merged_df = merged_df.sort_values(\"affinity_prediction\", ascending=True)\n\n        num_to_take = int(len(sub_hit_df) * args.hit_multiplier_to_take)\n        selected_df = merged_df.head(num_to_take)[\n                columns_to_keep\n        ].sample(frac=1.0).copy()\n        selected_df[\"hit\"] = selected_df[\"hit\"].fillna(0)\n        selected_df[\"sample_id\"] = sample_id\n        result_df.append(selected_df)\n\n        print(\n            \"Processed sample\",\n            sample_id,\n            \"with hit and decoys:\\n\",\n            selected_df.hit.value_counts())\n\n    result_df = pandas.concat(result_df, ignore_index=True, sort=False)\n    return result_df\n\n\ndef run():\n    import mhcflurry\n\n    args = parser.parse_args(sys.argv[1:])\n\n    configure_logging()\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n\n    hit_df = pandas.read_csv(args.hits)\n    numpy.testing.assert_equal(hit_df.hit_id.nunique(), len(hit_df))\n    hit_df = hit_df.loc[\n        (hit_df.mhc_class == \"I\") &\n        (hit_df.peptide.str.len() <= 11) &\n        (hit_df.peptide.str.len() >= 8) &\n        (~hit_df.protein_ensembl.isnull()) &\n        (hit_df.peptide.str.match(\"^[%s]+$\" % \"\".join(\n            mhcflurry.amino_acid.COMMON_AMINO_ACIDS)))\n    ]\n    print(\"Loaded hits from %d samples\" % hit_df.sample_id.nunique())\n    hit_df = hit_df.loc[hit_df.format == \"MONOALLELIC\"].copy()\n    print(\"Subselected to %d monoallelic samples\" % hit_df.sample_id.nunique())\n    hit_df[\"allele\"] = hit_df.hla\n\n    hit_df = hit_df.loc[hit_df.allele.str.match(\"^HLA-[ABC]\")]\n    print(\"Subselected to %d HLA-A/B/C samples\" % hit_df.sample_id.nunique())\n\n    if args.exclude_contig:\n        new_hit_df = hit_df.loc[\n            hit_df.protein_primary_ensembl_contig.astype(str) !=\n            args.exclude_contig\n        ]\n        print(\n            \"Excluding contig\",\n            args.exclude_contig,\n            \"reduced dataset from\",\n            len(hit_df),\n            \"to\",\n            len(new_hit_df))\n        hit_df = new_hit_df.copy()\n    if args.alleles:\n        filter_alleles = set(args.alleles)\n        new_hit_df = hit_df.loc[\n            hit_df.allele.isin(filter_alleles)\n        ]\n        print(\n            \"Selecting alleles\",\n            args.alleles,\n            \"reduced dataset from\",\n            len(hit_df),\n            \"to\",\n            len(new_hit_df))\n        hit_df = new_hit_df.copy()\n\n    sample_table = hit_df.drop_duplicates(\"sample_id\").set_index(\"sample_id\")\n    grouped = 
hit_df.groupby(\"sample_id\").nunique()\n    for col in sample_table.columns:\n        if (grouped[col] > 1).any():\n            del sample_table[col]\n    sample_table[\"total_hits\"] = hit_df.groupby(\"sample_id\").peptide.nunique()\n\n    print(\"Loading proteome peptides\")\n    all_peptides_df = pandas.read_csv(args.proteome_peptides)\n    print(\"Loaded: \", all_peptides_df.shape)\n\n    all_peptides_df = all_peptides_df.loc[\n        all_peptides_df.protein_accession.isin(hit_df.protein_accession.unique()) &\n        all_peptides_df.peptide.str.match(\"^[%s]+$\" % \"\".join(\n            mhcflurry.amino_acid.COMMON_AMINO_ACIDS))\n    ].copy()\n    all_peptides_df[\"length\"] = all_peptides_df.peptide.str.len()\n    print(\"Subselected proteome peptides by accession: \", all_peptides_df.shape)\n\n    all_peptides_by_length = dict(iter(all_peptides_df.groupby(\"length\")))\n\n    print(\"Selecting decoys.\")\n\n    GLOBAL_DATA['args'] = args\n    GLOBAL_DATA['lengths'] = [8, 9, 10, 11]\n    GLOBAL_DATA['all_peptides_by_length'] = all_peptides_by_length\n    GLOBAL_DATA['sample_table'] = sample_table\n    GLOBAL_DATA['hit_df'] = hit_df\n\n    worker_pool = None\n    start = time.time()\n\n    tasks = [\n        {\"samples\": [sample]} for sample in hit_df.sample_id.unique()\n    ]\n\n    if serial_run:\n        # Serial run\n        print(\"Running in serial.\")\n        results = [do_process_samples(hit_df.sample_id.unique())]\n    elif args.cluster_parallelism:\n        # Run using separate processes HPC cluster.\n        print(\"Running on cluster.\")\n        results = cluster_results_from_args(\n            args,\n            work_function=do_process_samples,\n            work_items=tasks,\n            constant_data=GLOBAL_DATA,\n            input_serialization_method=\"dill\",\n            result_serialization_method=\"pickle\",\n            clear_constant_data=False)\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n        results = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, do_process_samples),\n            tasks,\n            chunksize=1)\n\n    print(\"Reading results\")\n\n    result_df = []\n    for worker_result in tqdm.tqdm(results, total=len(tasks)):\n        for sample_id, selected_df in worker_result.groupby(\"sample_id\"):\n            print(\n                \"Received result for sample\",\n                sample_id,\n                \"with hit and decoys:\\n\",\n                selected_df.hit.value_counts())\n        result_df.append(worker_result)\n\n    print(\"Received all results in %0.2f sec\" % (time.time() - start))\n\n    result_df = pandas.concat(result_df, ignore_index=True, sort=False)\n    result_df[\"hla\"] = result_df.sample_id.map(sample_table.hla)\n\n    print(result_df)\n    print(\"Counts:\")\n    print(result_df.groupby([\"sample_id\", \"hit\"]).peptide.nunique())\n\n    print(\"Hit counts:\")\n    print(\n        result_df.loc[\n            result_df.hit == 1\n        ].groupby(\"sample_id\").hit.count().sort_values())\n\n    print(\"Hit rates:\")\n    print(result_df.groupby(\"sample_id\").hit.mean().sort_values())\n\n    result_df.to_csv(args.out, index=False)\n    print(\"Wrote: \", args.out)\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n\nif __name__ == '__main__':\n    run()\n"
  },
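  {
    "path": "downloads-generation/models_class1_processing/examples/fork_shared_data_sketch.py",
    "content": "\"\"\"\nIllustrative sketch only (hypothetical file, not part of the original\npipeline): the fork-based data sharing pattern used by make_train_data.py.\nLarge objects stored in a module-level dict before the pool is created are\ninherited by worker processes via fork(), avoiding pickling them into each\ntask. Requires a fork start method (the Linux default).\n\"\"\"\nimport multiprocessing\n\nGLOBAL_DATA = {}\n\n\ndef work(key):\n    # Child processes see the parent's GLOBAL_DATA as it was at fork time.\n    return len(GLOBAL_DATA[key])\n\n\nif __name__ == \"__main__\":\n    GLOBAL_DATA[\"big\"] = list(range(10 ** 6))  # stand-in for a large DataFrame\n    with multiprocessing.get_context(\"fork\").Pool(2) as pool:\n        print(pool.map(work, [\"big\", \"big\"]))\n"
  },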
  {
    "path": "downloads-generation/models_class1_selected_no_mass_spec/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Model select standard MHCflurry Class I models.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_selected_no_mass_spec\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\ncp $SCRIPT_DIR/write_validation_data.py .\n\nmkdir models\n\nGPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\necho \"Detected GPUS: $GPUS\"\n\nPROCESSORS=$(getconf _NPROCESSORS_ONLN)\necho \"Detected processors: $PROCESSORS\"\n\ntime python ./write_validation_data.py \\\n    --include \"$(mhcflurry-downloads path data_curated)/curated_training_data.no_mass_spec.csv.bz2\" \\\n    --exclude \"$(mhcflurry-downloads path models_class1_unselected)/models/train_data.csv.bz2\" \\\n    --only-alleles-present-in-exclude \\\n    --out-data test.csv \\\n    --out-summary test.summary.csv\n\nwc -l test.csv\n\ntime mhcflurry-class1-select-allele-specific-models \\\n    --data test.csv \\\n    --models-dir \"$(mhcflurry-downloads path models_class1_unselected)/models\" \\\n    --out-models-dir models \\\n    --scoring combined:mse,consensus \\\n    --consensus-num-peptides-per-length 10000 \\\n    --combined-min-models 8 \\\n    --combined-max-models 16 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 5\n\ntime mhcflurry-calibrate-percentile-ranks \\\n    --models-dir models \\\n    --num-peptides-per-length 100000 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
  {
    "path": "downloads-generation/models_class1_trained_with_mass_spec/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Model select MHCflurry Class I models that were trained on mass-spec. Model\n# selection uses both mass-spec and affinity data.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_trained_with_mass_spec\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\ncp $SCRIPT_DIR/write_validation_data.py .\n\nmkdir models\n\nGPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\necho \"Detected GPUS: $GPUS\"\n\nPROCESSORS=$(getconf _NPROCESSORS_ONLN)\necho \"Detected processors: $PROCESSORS\"\n\npython ./write_validation_data.py \\\n    --include \"$(mhcflurry-downloads path data_curated)/curated_training_data.with_mass_spec.csv.bz2\" \\\n    --exclude \"$(mhcflurry-downloads path models_class1_unselected_with_mass_spec)/models/train_data.csv.bz2\" \\\n    --only-alleles-present-in-exclude \\\n    --out-data test.csv \\\n    --out-summary test.summary.csv\n\nwc -l test.csv\n\ntime mhcflurry-class1-select-allele-specific-models \\\n    --data test.csv \\\n    --models-dir \"$(mhcflurry-downloads path models_class1_unselected_with_mass_spec)/models\" \\\n    --out-models-dir models \\\n    --scoring combined:mass-spec,mse,consensus \\\n    --consensus-num-peptides-per-length 10000 \\\n    --combined-min-models 8 \\\n    --combined-max-models 16 \\\n    --unselected-accuracy-scorer combined:mass-spec,mse \\\n    --unselected-accuracy-percentile-threshold 95 \\\n    --mass-spec-min-measurements 500 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 1\n\ntime mhcflurry-calibrate-percentile-ranks \\\n    --models-dir models \\\n    --num-peptides-per-length 100000 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
  {
    "path": "downloads-generation/models_class1_unselected/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Train standard MHCflurry Class I models.\n# Calls mhcflurry-class1-train-allele-specific-models on curated training data\n# using the hyperparameters in \"hyperparameters.yaml\".\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_unselected\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nmkdir models\n\ncp $SCRIPT_DIR/class1_pseudosequences.csv .\n\npython $SCRIPT_DIR/generate_hyperparameters.py > hyperparameters.yaml\n\nGPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\necho \"Detected GPUS: $GPUS\"\n\nPROCESSORS=$(getconf _NPROCESSORS_ONLN)\necho \"Detected processors: $PROCESSORS\"\n\ntime mhcflurry-class1-train-allele-specific-models \\\n    --data \"$(mhcflurry-downloads path data_curated)/curated_training_data.no_mass_spec.csv.bz2\" \\\n    --allele-sequences class1_pseudosequences.csv \\\n    --hyperparameters hyperparameters.yaml \\\n    --out-models-dir models \\\n    --held-out-fraction-reciprocal 10 \\\n    --min-measurements-per-allele 25 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
  {
    "path": "downloads-generation/models_class1_unselected/README.md",
    "content": "# Class I allele-specific models (ensemble)\n\nThis download contains trained MHC Class I MHCflurry models.\n\nTo generate this download run:\n\n```\n./GENERATE.sh\n```"
  },
  {
    "path": "downloads-generation/models_class1_unselected/class1_pseudosequences.csv",
    "content": "allele,pseudosequence\nHLA-A*01:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:02,YSAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:03,YFAMYQENMAHTDANTLYIMYRDYTWVARVYRGY\nHLA-A*01:06,YFAMYQENMAHTDANTLYIIYRDYTWVALAYRGY\nHLA-A*01:07,YFAMYQENVAHTDENTLYIIYRDYTWVARVYRGY\nHLA-A*01:08,YFAMYQENMAHTDANTLYIIYRDYTWVARVYWGY\nHLA-A*01:09,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:10,YFAMYQENMAHTDANTLYIIYRDYTWARRVYRGY\nHLA-A*01:12,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A*01:13,YFAMYQENMAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*01:14,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTGY\nHLA-A*01:17,YFAMYQENMAQTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:19,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A*01:20,YSAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A*01:21,YFAMYQENMAHTDANTLYIIYRDYTWAVRVYRGY\nHLA-A*01:23,YFAMYQENVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:24,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:25,YFAMYQENMAHTDANTLYIIYRDYTWVAQVYRGY\nHLA-A*01:26,YFAMYQENMAHTDANTLYIIYRDYTWAARVYRGY\nHLA-A*01:28,YFAMYQENMAHTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*01:29,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:30,YFAMYQENMAHTDANTLYIIYHYYTWVARVYRGY\nHLA-A*01:32,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:33,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:35,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:36,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:37,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:38,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:39,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:40,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:41,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:42,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:43,YYAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:44,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:45,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:46,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:47,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:48,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:49,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:50,YFAMYQENMAHTDANTLYIIYREYTWVARVYRGY\nHLA-A*01:51,YFAMYRNNVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:54,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:55,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:58,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:59,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:60,YFAMYPENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:61,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:62,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:63,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:64,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:65,YFAMYQENMAHTDANTLYIIYRDYTWVARVCRGY\nHLA-A*01:66,YFAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A*02:01,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:02,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:03,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:04,YFAMYGEKVAHTHVDTLYVMYHYYTWAVLAYTWY\nHLA-A*02:05,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:06,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:07,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:08,YYAMYGENVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:09,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:10,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:11,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:12,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:13,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYTWY\nHLA-A*02:14,YYAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A*02:16,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A*02:17,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:18,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:19,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:20,YFAMYGENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:21,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:22,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A*02:24,
YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:25,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:26,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:27,YFAMYGEKVAHTHVDTLYVRYHYYTWAAQAYTWY\nHLA-A*02:28,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:29,YFAMYGEQVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:30,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:31,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:33,YFAMYGEKVAHTHVDTLYVRSHYYTWAVLAYTWY\nHLA-A*02:34,YFAMYGEKVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:35,YFAMYGEKVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:36,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTGY\nHLA-A*02:37,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:38,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYRWY\nHLA-A*02:39,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:40,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:41,YYAMYGEKVAHTHVDTLYVRYQYYTWAVLAYTWY\nHLA-A*02:42,YFSMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:44,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:45,YFAMYQEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:46,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:47,YFAMYGEKVAHSHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:48,YFAMYEEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:49,YFAMYGEKVAHTHVDTLYVRYHYYTWAVRAYTWY\nHLA-A*02:50,YFAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*02:51,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:52,YFAMYGEKVAHTHVDTLYVRYEHYTWAVLAYTWY\nHLA-A*02:54,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:55,YFAMYRNNVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:56,YFAMYQENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:57,YYAMYGEKVAHTHVDTLYLMYHYYTWAVLAYTWY\nHLA-A*02:58,YFAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A*02:59,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:60,YFAMYGEKVAHTHVDTLYVRYHFYTWAVLAYTWY\nHLA-A*02:61,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:62,YFAMYGENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:63,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:64,YFAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A*02:65,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*02:66,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:67,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:68,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:69,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:70,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:71,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:72,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:73,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A*02:74,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:75,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:76,YSAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:77,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:78,YYAMYQENVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:79,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:80,YFAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A*02:81,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A*02:84,YYAMYGEKVAHTHVDTLYFRYHYYTWAVLAYTWY\nHLA-A*02:85,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:86,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:87,YFAMYGEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A*02:89,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:90,YFAMYGEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:91,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:92,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:93,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:95,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:96,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:97,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:99,YYAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:101,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYRWY\nHLA-A*02:102,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:103,YFAMYQENVAQTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:104,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A*02:105,YFAMYGEKVAHTHVDTLYVRYEYYTWAVLAYTWY\nHLA-A*02:106,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:
107,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:108,YYAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:109,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:110,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:111,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:112,YFAMYGEKVAHTDENIAYVRCHYYTWAVLAYTWY\nHLA-A*02:114,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A*02:115,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:116,YFAMYGEKVAHTHLDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:117,YFAMYGEKVAHTHVDTLYVRYQDYTWAEWAYTWY\nHLA-A*02:118,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:119,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:120,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:121,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:122,YYAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*02:123,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:124,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A*02:126,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:127,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A*02:128,YFAMYGENVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:129,YYAMYEEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A*02:130,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:131,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A*02:132,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:133,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:134,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:135,YFAMYGEKVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*02:136,YFAMYGEKVAHTDENIAYVRYHYYTWAVWAYTWY\nHLA-A*02:137,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:138,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:139,YFAMYGEKVTHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:140,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:141,YFVMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:142,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:143,YYAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:144,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:145,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:146,YFAMYGEKVAHTDANTLYVRYHYYTWAVLAYTWY\nHLA-A*02:147,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A*02:148,YFAMYGEKVAHTHVDTLYVRFHYYTWAEWAYTWY\nHLA-A*02:149,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:150,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:151,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A*02:152,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*02:153,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:154,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYRWY\nHLA-A*02:155,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:156,YFAMYGEKVAHTHVDTLYIIYHYYTWAVLAYTWY\nHLA-A*02:157,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:158,YFAMYGEKVAHAHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:159,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:160,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:161,YFAVYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:162,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:163,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:164,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:165,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:166,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:167,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A*02:168,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:169,YYAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:170,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:171,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:172,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:173,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:174,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:175,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:176,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:177,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:178,YYAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A*02:179,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:180,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:181,YFAMYGEKVAHTHVDTLYVRYHY
YTWAVLAYTWY\nHLA-A*02:182,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:183,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:184,YFAMYGEKVAHTHEDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:185,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:186,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:187,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:188,YFAMYGEKVAHTHVDTLYVRYDSYTWAVLAYTWY\nHLA-A*02:189,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:190,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:191,YFAMYGEKVAHTHVDTLYVRCHYYTWAVWAYTWY\nHLA-A*02:192,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:193,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:194,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:195,YFAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:196,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:197,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:198,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:199,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:200,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:201,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:202,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:203,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:204,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:205,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:206,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:207,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:208,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:209,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:210,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:211,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:212,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:213,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:214,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:215,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:216,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:217,YFAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:218,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:219,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:220,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:221,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:224,YFAMYGEKVAHTHVDTLYVGYHYYTWAVLAYTWY\nHLA-A*02:228,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:229,YYAMYGEKVAHTHVDTLYLRYRYYTWAVWAYTWY\nHLA-A*02:230,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:231,YFAMYGEKVAHTHVDTLYVRNHYYTWAVLAYTWY\nHLA-A*02:232,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:233,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTRY\nHLA-A*02:234,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:235,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:236,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:237,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:238,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:239,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:240,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:241,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:242,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:243,YTAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:244,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:245,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A*02:246,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A*02:247,YFAMYGEKVAHTDENTLYVRYHYYTWAVLAYTWY\nHLA-A*02:248,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:249,YFAMYVEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:251,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:252,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:253,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:254,YFAMYGEKVAHTHVDTLYVRYNFYTWAVLAYTWY\nHLA-A*02:255,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTGY\nHLA-A*02:256,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:257,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:258,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:259,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:260,Y
FAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:261,YFAMYGEKVAHTHMDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:262,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLVYTWY\nHLA-A*02:263,YFAMYGEKVAHTHVDTLYVRYHYYTWSVLAYTWY\nHLA-A*02:264,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:265,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:266,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*03:01,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:02,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:04,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:05,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:06,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:07,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*03:08,YFAMYQENVAHTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:09,YFAMYQENVAQTHVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:10,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:12,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:13,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:14,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:15,YFAMYQENVAQTDVDTLYIIFRDYTWAELAYTWY\nHLA-A*03:16,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:17,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:18,YFAMYQENVAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*03:19,YFAMYQENVAQTDVDTLYIIFHYYTWAELAYTWY\nHLA-A*03:20,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:22,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:23,YFAMYGEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:24,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:25,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:26,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:27,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:28,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:29,YFAMYQENVVQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:30,YFAMYEEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:31,YFAMYQENVAQTDVDTLYIIYRYYTWAVQAYTWY\nHLA-A*03:32,YFAMYQENVAHIDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:33,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:34,YFAMYQENVAPTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:35,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:37,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:38,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:39,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:40,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:41,YFAMYQENVAHTDANTLYIIYRDYTWAELAYTWY\nHLA-A*03:42,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*03:43,YFAMYQENVAQTDVDTLYIIYEHYTWAELAYTWY\nHLA-A*03:44,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:45,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:46,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:47,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:48,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:49,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:50,YFAMYQENVAQTDVDTLYIIYRDYTWAEWAYTWY\nHLA-A*03:51,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:52,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:53,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:54,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:55,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:56,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:57,YFAMYQENVAQTDANTLYIIYRDYTWAELAYTWY\nHLA-A*03:58,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:59,CFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:60,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:61,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:62,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:63,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:64,YFAMYQENVAQTDVDTLYIIYRDYTWADLAYTWY\nHLA-A*03:65,YFAMYQENVAQTDVDTLYIIYRDYTWAEQAYTWY\nHLA-A*03:66,YFAMYQENVAQTDVDTLYIIYRDYTWAERAYTWY\nHLA-A*03:67,YFATYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:70,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:71,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:72,YSAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:73,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:7
4,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:75,YFAMYQENVAQTDVDTLYLMYRDYTWAELAYTWY\nHLA-A*03:76,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:77,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:78,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:79,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:80,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:81,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:82,YFAMYQENVAQTDVDTLYIIYEHYTWAVQAYTWY\nHLA-A*11:01,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:02,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:03,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A*11:04,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTWY\nHLA-A*11:05,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:06,YYAMYQENVAQTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:07,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:08,YYAMYQENVAQTDVDTLYIIYRDYTWAERAYRWY\nHLA-A*11:09,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:10,YYAMYRNNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:11,YYAMYLQNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:12,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:13,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:14,YYAMYQENVAQTDVDTLYIIYRDYTWARQAYRWY\nHLA-A*11:15,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:16,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:17,YYAMYQENMAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A*11:18,YYAMYQENVAHTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:19,YYAMYQENVAHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:20,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A*11:22,YYAMYQENVAQTDVDTLYIIYPDYTWAAQAYRWY\nHLA-A*11:23,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:24,YYAMYQENVAQTDVDTLYIIYRDYTWAALAYRWY\nHLA-A*11:25,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYRWY\nHLA-A*11:26,YYAMYQENVAQTDVDTLYIMYRDYTWAAQAYRWY\nHLA-A*11:27,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTGY\nHLA-A*11:29,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:30,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:31,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYRWY\nHLA-A*11:32,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:33,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:34,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:35,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*11:36,YYAMYQENVAQTDVDTLYIICRDYTWAAQAYRWY\nHLA-A*11:37,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:38,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A*11:39,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A*11:40,YYAMYQENVAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A*11:41,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:42,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:43,YTAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:44,YYAMYQENVAQTDVDTLYIIYRDYTWAARAYRWY\nHLA-A*11:45,YYAMYQENVAQTDADTLYIIYRDYTWAAQAYRWY\nHLA-A*11:46,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:47,YHAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:48,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:49,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:51,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:53,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:54,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:55,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:56,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:57,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:58,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:59,YYAMYQENVAQTDVDTLYIIYGDYTWAAQAYRWY\nHLA-A*11:60,YYAMYQENVAQTDVDTLYIIYRDYTWAVQAYRWY\nHLA-A*11:61,YYAMYQENAAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:62,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:63,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:64,YYAMYQENVAQTDVDTLHIIYRDYTWAAQAYRWY\nHLA-A*23:01,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A*23:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:04,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A*23:05,CSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:06,Y
SAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:09,YSAMYQENMAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYRGY\nHLA-A*23:12,YSAMYEEKVAHTHENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:13,YSAMYEEKVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:14,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:15,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:16,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:17,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:18,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:24,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:25,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:04,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:05,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:06,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A*24:07,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:08,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRWY\nHLA-A*24:13,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:14,YSAMYEEKVAHTDENIAYVRYHYYTWAVQAYTGY\nHLA-A*24:15,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A*24:17,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A*24:18,YSAMYEEKVAHTDENIAYLMFHYYTWAELAYTWY\nHLA-A*24:19,YSAMYEEKVAQTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVWVYTWY\nHLA-A*24:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:24,YSAMYRNNVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:25,CSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:27,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:28,YSAMYEEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:29,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:30,YSAMYEEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:31,YSAMYEQKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:32,YSAMYEEKVAHTDESIAYLMFHYYTWAVQAYTGY\nHLA-A*24:33,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:34,YSAMYEEKVAHIDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:35,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:37,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:38,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:39,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:41,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A*24:42,YSAMYGEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:43,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:44,YSAMYEEKVAHTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:46,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRGY\nHLA-A*24:47,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:49,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:50,YYAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:51,YSAMYEEKVAHTDENIAYLIYHYYTWAVQAYTGY\nHLA-A*24:52,YSAMYEEKVAHTDENIAYLRFHYYTWAVQAYTGY\nHLA-A*24:53,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A*24:54,YSAMYEEKVAHTDENIAYLMFHYYTWAVQPYTGY\nHLA-A*24:55,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYEGY\nHLA-A*24:56,YSAMYEEKVAHTDENIAYLMFHYYTWAEQAYTGY\nHLA-A*24:57,YSAMYEEKVAHTDENIAYIMYHYYTWAVQAYTGY\nHLA-A*24:58,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:59,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYTGY\nHLA-A*24:61,YSAMYEEKVAHTDEKIAYLMFHYYTWAVQAYTGY\nHLA-A*24:62,YSAMYEEKVAHTDENIAYLMFQDYTWAVQAYTGY\nHLA-A*24:63,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:64,YSAMYEEKVAHTDENIAYLWIHYYTWAVQAYTGY\nHLA-A*24:66,YSAMYEEKVAHTDENIAYLMFEHYTWAVQAYTGY\nHLA-A*24:67,YSAMYRNNVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:68,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:69,YSAM
YEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:70,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:71,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:72,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:73,YSAMYEEKVAHTDENIAYLMFDYYTWAVQAYTGY\nHLA-A*24:74,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:75,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:76,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:77,YSAMYQEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:78,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:79,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:80,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:81,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:82,YTAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:85,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:87,YSAMYEEKVAHTDENIAYLMFHYYTWAVRAYTGY\nHLA-A*24:88,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:89,YSAMYGEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:91,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:92,YSAMYEEKVAHTDENIAYIIYHYYTWAVQAYTGY\nHLA-A*24:93,YSAMYEEKVAHTDENIAYVMFHYYTWAVQAYTGY\nHLA-A*24:94,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A*24:95,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:96,YSAMYEEKVAHTDENIAYLMFNFYTWAVQAYTGY\nHLA-A*24:97,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:98,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:99,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:100,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:101,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:102,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:103,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:104,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:105,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:106,YSAMYEEKVAHTDENIAYLMFDDYTWAVQAYTGY\nHLA-A*24:107,YSAMYEEKVAHTDENIAYLMFHYYTWAVHAYTGY\nHLA-A*24:108,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:109,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:110,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:111,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:112,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:113,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:114,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:115,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:116,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:117,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:118,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:119,YSAMYEEKVAHADENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:120,YSAMYEEKVAHTDENIAYIMFHYYTWAVQAYTGY\nHLA-A*24:121,YSAMYEEKVAHTDENIAYLMFHSYTWAVQAYTGY\nHLA-A*24:122,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:123,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:124,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:125,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:126,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:127,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:128,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:129,YSAMYQENMAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:130,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:131,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:133,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:134,YSAMYEEKVAHTDENIAYLMFHYYPWAVQAYTGY\nHLA-A*24:135,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:136,YSAMYEEKVAHTDENIAYLMFHYYTWVVQAYTGY\nHLA-A*24:137,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:138,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTWY\nHLA-A*24:139,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:140,YSTMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:141,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:142,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:143,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYLGY\nHLA-A*24:144,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*25:01,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:02,YYAMYRNNVAQTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:03,YFAMYRNNVAH
TDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:04,YYAMYRNNVAHTDESIAYIRYQDYTWAEQAYRWY\nHLA-A*25:05,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:06,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYTWY\nHLA-A*25:07,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:08,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:09,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:10,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:11,YYAMYRNNVAHTHESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:13,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*26:01,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:02,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A*26:03,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:04,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYLWY\nHLA-A*26:05,YYAMYRNNVAHTDENTLYIRYQDYTWAEWAYRWY\nHLA-A*26:06,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:07,YYAMYGEKVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:08,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A*26:09,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYTWY\nHLA-A*26:10,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:12,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A*26:13,YYAMYRNNVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:14,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:15,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:16,YSAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:17,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:18,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A*26:19,YYAMYQENVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:20,YFAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:21,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:22,YYAMYRNNVAHTDANTLYVRYQDYTWAEWAYRWY\nHLA-A*26:23,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:24,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:26,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:27,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:28,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:29,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A*26:30,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:31,YYAMYPNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:32,YYAMYRNNVAHTDANTLYMVYQDYTWAEWAYRWY\nHLA-A*26:33,YYAMYRNNVAQIHANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:34,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYWWY\nHLA-A*26:35,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:36,YYAMYRNNVAHTHANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:37,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:38,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:39,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:40,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:41,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:42,YYAIYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:43,YYAMYRNNVAHTDANTLYIRYQDYTWAELAYRWY\nHLA-A*26:45,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:46,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:47,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:48,YYAMYRNKVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A*26:49,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A*26:50,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*29:01,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:02,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:03,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTGY\nHLA-A*29:04,YTAMYLQHVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:05,YTAMYLQNVAQTDANTLYIMYRDYTWAEQAYTWY\nHLA-A*29:06,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:07,YTAMYLQNVAQTDANTLYLMFRDYTWAVLAYTWY\nHLA-A*29:09,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:10,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:11,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:12,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:13,YTAMYLQNVAQTDESIAYIMYRDYTWAVLAYTWY\nHLA-A*29:14,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:15,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:16,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:17,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:18,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:19,YTAMYLQNVAHTHV
DTLYIMYRDYTWAVLAYTWY\nHLA-A*29:20,YTAMYLQNVAHTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:21,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:22,YTAMYLQNVAQTDANTLYVRYRDYTWAVLAYTWY\nHLA-A*30:01,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:02,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:03,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:04,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A*30:06,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A*30:07,YSAMYEEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:08,YYAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:09,YSAMYQENVAHTDENTLYIIYEHYTWAVLAYTWY\nHLA-A*30:10,YSAMYQENVAHTDENTLYIIHEHYTWARLAYTWY\nHLA-A*30:11,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:12,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:13,YSAMYQENVAHTHVDTLYIIYEHYTWARLAYTWY\nHLA-A*30:15,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:16,YSAMYQENVAQTHVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:17,YSAMYQENVAQTDVDTLYIIYEHYTWAVWAYTWY\nHLA-A*30:18,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:19,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:20,YSAMYQENVAQTEVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:22,YSAMYGEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:23,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:24,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:25,YSAMYQENVAQTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:26,YSAMYQENVAQTDVDTLYIIYRDYTWAWLAYTWY\nHLA-A*30:28,YSAMYQENVAHTDENTLYIVYEHYTWARLAYTWY\nHLA-A*30:29,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTSY\nHLA-A*30:30,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:31,YSAMYQENVARTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:32,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:33,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:34,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:35,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:36,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:37,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:38,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:39,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:40,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:41,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*31:01,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:02,YTAMYQEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:03,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*31:04,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*31:05,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A*31:06,YTAMYQENVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*31:07,YTAMYQEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:08,YTAMYEEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:09,YTAMYQENVGHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:10,YTAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:11,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:12,YTAMYQENVAHIDVDTLYIKYQDYTWAVLAYTWY\nHLA-A*31:13,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:15,YTAMYQENVARIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:16,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:17,YTAMYQENVAHINVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:18,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTRY\nHLA-A*31:19,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:20,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:21,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:22,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:23,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:24,YTAMYQENVAHIDVDTLYIMYQDYTWAAQAYRWY\nHLA-A*31:25,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYRWY\nHLA-A*31:26,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:27,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:28,YTAMYQENVTHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:29,YTAMYQENVAHIDVDTLYLMFQDYTWAVLAYTWY\nHLA-A*31:30,YTAMYQENVAHIDVDTLYIMYQDYTWAVWAYTWY\nHLA-A*31:31,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:32,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:33,YTAMYQENVAHIDGDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:34,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:35,YTAMYQENVAHIDVDTL
YIMYQDYTWAVLAYTWY\nHLA-A*31:36,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:37,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*32:01,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:02,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A*32:03,YFAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*32:04,YFAMYQENVAHTDESIAYIIYRDYTWAELAYTWY\nHLA-A*32:05,YFAMYQEKVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:06,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:07,YSAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:08,YFAMYQENVAHTHESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:09,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTRY\nHLA-A*32:10,YFAMYQENVAHTDESIAYIMYQDYTWAEWAYTWY\nHLA-A*32:12,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:13,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTGY\nHLA-A*32:14,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:15,YFAMYRNNVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:16,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:17,YFAMYQENVAQTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:18,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:20,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:21,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:22,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A*32:23,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:24,YFAMYQENMAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:25,YFAMYHENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*33:01,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:03,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:04,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:05,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:06,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:07,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:08,YTAMYGEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:09,YTAMYGENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:10,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A*33:11,YTAMYRNNVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:12,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:13,YTAMYRNNVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A*33:14,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:15,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:16,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:17,YTAMYRNNVAHIDADTLYIMYQDYTWAVLAYTWY\nHLA-A*33:18,YTAMYRNNVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*33:19,YTAMYRNNVAHIDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*33:20,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:21,YTAMYEENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:22,YTAMYRNNVAHIDVDTLYVRYQDYTWAVLAYTWY\nHLA-A*33:23,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:24,YTAMYRNNVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:25,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:26,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:27,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:28,YTAMYRNNVAHIDVDTLYIMYQDYTWAELAYTWY\nHLA-A*33:29,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:30,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:31,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*34:01,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A*34:02,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:03,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:04,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:05,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A*34:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYTWY\nHLA-A*34:07,YYAMYRNNVSQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:08,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*36:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A*36:02,YFAMYQENMAHTDANTLYIIYRDYTWVARAYTWY\nHLA-A*36:03,YFAMYQENMAHTDANTLYLMYRDYTWVARVYTWY\nHLA-A*36:04,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRWY\nHLA-A*36:05,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A*43:01,YYAMYLQNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*66:01,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:02,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A*66:03,YYAMYRNNVAHTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A*66:04,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWH\nHLA-A*66:05,YYAMYRNNVAHTDVDTLYIR
YQDYTWAEWAYRWY\nHLA-A*66:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYRWY\nHLA-A*66:07,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:08,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:09,YYAMYRNNVAQTDVDTLYVRYQDYTWAEWAYRWY\nHLA-A*66:10,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRGY\nHLA-A*66:11,YYAMYRNNVAQTDADTLYIRYQDYTWAEWAYRWY\nHLA-A*66:12,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:13,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:14,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:15,YYAMYRNNVAHIDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*68:01,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:02,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:03,YYAMYRNNVAHTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:04,YYAMYRNNVAHIDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:05,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:06,YYAMYRNNVAQTDVDTLYIMYEHYTWAVWAYTWY\nHLA-A*68:07,YYAMYRNNVAQTDVDTLYIMYRHYTWAVWAYTWY\nHLA-A*68:08,YYAMYRNNVAQTDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*68:09,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTWY\nHLA-A*68:10,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:12,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:13,YYAMYRENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:14,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:15,YYAMYRNNVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:16,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:17,YYAMYRNNVAQTDVDTLYIMYRVYTWAVWAYTWY\nHLA-A*68:19,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:20,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:21,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:22,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:23,YYAMYRNNVAQTDVDTLYIRYRDYTWAVWAYTWY\nHLA-A*68:24,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:25,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:26,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTGY\nHLA-A*68:27,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:28,YYAMYRNNVAQTDVDTLYIRYHYYTWAVRAYTWY\nHLA-A*68:29,YTAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:30,YYAMYGENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:31,YYAMYRNNVAHTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:32,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:33,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:34,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYMWY\nHLA-A*68:35,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:36,YYAMYRNNVAQTDENIAYIMYRDYTWAVWAYTWY\nHLA-A*68:37,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:38,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:39,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:40,YYAMYRNNVGQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:41,YYAMYRNNVAQTDVDTLYIMYRDYTWVVWAYTWY\nHLA-A*68:42,YYAMYRNNVAQTDVDTLYIMYRDYTWAEWAYTWY\nHLA-A*68:43,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:44,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:45,YSAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:46,YYAMYRNNVAQTDVNTLYIMYRDYTWAVWAYTWY\nHLA-A*68:47,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:48,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:50,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:51,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:52,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:53,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:54,YYAMYRNNVAQTDVDTLYIRYHYYTWAEWAYTWY\nHLA-A*69:01,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*74:01,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:02,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:03,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:04,YFAMYGEKVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:05,YFAMYQENVAHADVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:06,YFAMYQENVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:07,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:08,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:09,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:10,YFAMYQENVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A*74:11,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:13,YFAMYQENVAQTDVDTLYIMYQD
YTWAVLAYTWY\nHLA-A*80:01,YFAMYEENVAHTNANTLYIIYRDYTWARLAYEGY\nHLA-A*80:02,YFAMYEENVAHTDVDTLYIIYRDYTWARLAYEGY\nHLA-B*07:02,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:03,YYSEYRNIYTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:04,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYEWY\nHLA-B*07:05,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:06,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:07,YYSEYRNIYAQTDESNLYLRYDYYTWAERAYEWY\nHLA-B*07:08,YYSEYRNIFTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:09,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B*07:10,YYSEYRNICAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:11,YYSEYRNIYAQTDENNLYLSYDSYTWAERAYEWY\nHLA-B*07:12,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:13,YYSGYREKYRQADVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:14,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:15,YYSEYRNIYAQADVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:16,YYSEYRNIYTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:17,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B*07:18,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:19,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYTWY\nHLA-B*07:20,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLSY\nHLA-B*07:21,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:22,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:23,YYSEYRNIYAQTDESNLHLSYDYYTWAERAYEWY\nHLA-B*07:24,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B*07:25,YYSEYRNIYAQTDESNLYLSYDYYTWAVDAYEWY\nHLA-B*07:26,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYEWY\nHLA-B*07:27,YYSEYRNISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B*07:28,YYSEYRNIYAQTDESNLYLSYDDYTWAERAYEWY\nHLA-B*07:29,YDSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:30,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:31,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYTWY\nHLA-B*07:32,YYSEYRNIFTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:33,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:34,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYTWY\nHLA-B*07:35,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:36,YYSEYRNIYAQTDENIAYLSYDYYTWAERAYEWY\nHLA-B*07:37,YYSEYRNIYANTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:38,YYSEYRNIFTNTYENIAYLSYDYYTWAERAYEWY\nHLA-B*07:39,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:40,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:41,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:42,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:43,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYTWY\nHLA-B*07:44,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:45,YYSEYRNIYAQTDESNLYLSYDYYTWAERTYEWY\nHLA-B*07:46,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:47,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:48,YYSEYRNIYAQTDESNLYLSYDYYTWAVLAYEWY\nHLA-B*07:50,YYSEYRNISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:51,YYSEYRNIYAQTDESNLYLSYDYYTWAARAYEWY\nHLA-B*07:52,YYSEYRNIYAQTDESNLYLSYDYYTWAERVYEWY\nHLA-B*07:53,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:54,YYSEYREIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:55,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYEWY\nHLA-B*07:56,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:57,YYSEYRNIYAQTDENNLYLSYDYYTWAERAYEWY\nHLA-B*07:58,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:59,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:60,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B*07:61,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:62,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:63,YYSDYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:64,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEGY\nHLA-B*07:65,YYATYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:66,YYSEYRNIYAQTDESNLYLSYDYYTWAEQAYEWY\nHLA-B*07:68,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:69,YYSEYRNICTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:70,YCSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:71,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:72,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:73,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:74,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:75,YYSEYRNIYAQTYENNLYLSYDYYTW
AERAYEWY\nHLA-B*07:76,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:77,YYSEYRNIYAQTDESNLYLRSDYYTWAERAYEWY\nHLA-B*07:78,YYSEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B*07:79,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:80,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYEWY\nHLA-B*07:81,YYSEYRNIYAQTDESIAYLSYDYYTWAERAYEWY\nHLA-B*07:82,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:83,YYSEYRNIFAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:84,YYSEYRNIYAQTDESNLYWTYDYYTWAERAYEWY\nHLA-B*07:85,YYSEYRNICTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:86,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:87,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:88,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:89,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:90,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:91,YYSEYRNIYAQTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:92,YYSEYRNIYAQTDVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:93,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:94,YYSEYWNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:95,YYSEYRNIYAQTDESNLYFSYDYYTWAERAYEWY\nHLA-B*07:96,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:97,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:98,YYSEYRNIYAQTDESNLYLSYDYYTCAERAYEWY\nHLA-B*07:99,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:100,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYLWY\nHLA-B*07:101,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:102,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:103,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:104,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:105,YYSEYRNIYAQTVESNLYLSYNYYTWAERAYEWY\nHLA-B*07:106,YYSEYRNIYAQTDESNLYLSYDYYTRAERAYEWY\nHLA-B*07:107,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:108,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:109,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:110,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:112,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:113,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:114,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:115,YYSEYRNIYAQTDESNLYLSYNFYTWAERAYEWY\nHLA-B*08:01,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:02,YDSEYRNIFTNTDENTAYLSYNYYTWAVDAYTWY\nHLA-B*08:03,YDSEYRNIFTNTYENIAYLSYNYYTWAVDAYTWY\nHLA-B*08:04,YDSEYRNISTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:05,YDSEYRNTFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:07,YDSEYRNIFTNTDESNLYLSYDYYTWAVDAYTWY\nHLA-B*08:09,YDSEYRNIFTNTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*08:10,YDSEYRDIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:11,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYAWY\nHLA-B*08:12,YDSEYRNIFTNTDESNLYLRYNYYTWAVDAYTWY\nHLA-B*08:13,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYTWY\nHLA-B*08:14,YDSEYRNIFTNTDESNLYLSYHDYTWAVDAYTWY\nHLA-B*08:15,YDSEYRNIFTNTDVSNLYLSYNYYTWAVDAYTWY\nHLA-B*08:16,YDSEYRNIFTNADESNLYLRYNYYTWAVDAYTWY\nHLA-B*08:17,YDSEYREISTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B*08:18,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:20,YDSEYRNIFTNTDESNLYLSYNYYTWAERAYTWY\nHLA-B*08:21,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYLWY\nHLA-B*08:22,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:23,YDSEYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:24,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:25,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYLWY\nHLA-B*08:26,YYAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:27,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:28,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*08:29,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:31,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:32,YDSTYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:33,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:34,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:35,YDSEYRNIFTNTDESNLYLSYNSYTWAVDAYTWY\nHLA-B*08:36,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:37,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*08:38,YDSEYREIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:39,YDSEYRNIFTNTDE
SNLYLSYNYYTWAVDAYTWY\nHLA-B*08:40,YDSEYRNIFTNTDESNLYLSYNYYTWAVRAYEWY\nHLA-B*08:41,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:42,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:43,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:44,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:45,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:46,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:47,YDSEYRNIFTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B*08:48,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:49,YDSEYRNIFTNTDESNLYIRSNFYTWAVDAYTWY\nHLA-B*08:50,YYSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:51,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:52,YDSEYRNIFTNTDESIAYLSYNYYTWAVDAYTWY\nHLA-B*08:53,YDSEYRNIFTNTDESNLYLSYNYYTWAEDAYTWY\nHLA-B*08:54,YDSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:55,YDSEYRNIFTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B*08:56,YDAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:57,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:58,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:59,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:60,YDSEYRNIFTNTDESNLYISYNYYTWAVDAYTWY\nHLA-B*08:61,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:62,YHSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*13:01,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:02,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:03,YYTMYREISTNTYENTAYWTYNLYTWAVLAYLWY\nHLA-B*13:04,YYTMYREISTNTYENTAYWTYDSYTWAVLAYLWY\nHLA-B*13:06,YYTMYREISTNTYENTAYIRYNLYTWAVLAYTWY\nHLA-B*13:09,YYTMYREISTNTYESNLYWTYNLYTWAVLAYEWY\nHLA-B*13:10,YYTMYREISTNTYENTAYLRYDSYTWAVLAYEWY\nHLA-B*13:11,YYTMYREISTNTYENTAYLRYNLYTWAVLAYEWY\nHLA-B*13:12,YYTMYREISTNTYENTAYIRYNLYTWAVLAYGWY\nHLA-B*13:13,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B*13:14,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:15,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWH\nHLA-B*13:16,YYTMYREISTNTYENTAYWTYNLYTWAELAYEWY\nHLA-B*13:17,YYAMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:18,YYTMYREISTNTYENTAYWTYNLYTWAVRAYEWY\nHLA-B*13:19,YYTMYREVSTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:20,YYTMYREISTNTYENTAYIRYNLYTWAELAYEWY\nHLA-B*13:21,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B*13:22,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:23,YHTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:25,YYTMYREISTNTYESTAYIRYNLYTWAVLAYEWY\nHLA-B*13:26,YYTMYREISTNTYENTAYIRYDSYTWAVLAYEWY\nHLA-B*13:27,YYTMYREISTNTYENTAYWTFNLYTWAVLAYEWY\nHLA-B*13:28,YYTMYREISTNTYENTACIRYNLYTWAVLAYEWY\nHLA-B*13:29,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:30,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:31,YYTMYREISTNTYENTAYWTYNLYTWAEWAYEWY\nHLA-B*13:32,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:33,YYAMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:34,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:35,YYTMYREISTNTYENTAYWTYDYYTWAVLAYEWY\nHLA-B*13:36,YYTMYRNISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:37,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:38,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:39,YYTMYREISTNTYENNLYIRYNLYTWAVLAYEWY\nHLA-B*14:01,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:02,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:03,YYSEYRNICTNTDESNLYLWYNFYTWAERAYTWH\nHLA-B*14:04,HYSEYRNNCTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:05,YYSEYRNICTNTDESNLYLSYNFYTWAELAYTWH\nHLA-B*14:06,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B*14:08,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B*14:09,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:10,YYSEYRNICTNTDESNLYIRYNFYTWAELAYTWH\nHLA-B*14:11,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:12,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:13,YYSEYRNICTNTDESNLYLSYNYYTWAELAYTWH\nHLA-B*14:14,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B*14:15,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:16,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:17,YYSEYRNICTNTDESNL
YLWYNFYTWAELAYTWH\nHLA-B*14:18,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B*15:01,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:02,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:03,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:04,YYAMYREISTNTYESNLYWTYDSYTWAEWAYLWY\nHLA-B*15:05,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:06,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B*15:07,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*15:08,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:09,YYSEYRNICTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B*15:10,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B*15:11,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:12,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B*15:13,YYAMYRNISTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B*15:14,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLSY\nHLA-B*15:15,YYAMYRNISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:16,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:17,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:18,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:19,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B*15:20,YYAMYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*15:21,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:23,YYSEYRNICTNTYENIAYLRYDSYTWAELAYLWY\nHLA-B*15:24,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B*15:25,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:27,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B*15:28,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:29,YYSEYRNIFTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:30,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*15:31,YYAMYRNISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:32,YYAMYREISTNTYESNLYLRSDSYTWAEWAYLWY\nHLA-B*15:33,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:34,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:35,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B*15:36,YYAMYREISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B*15:37,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWH\nHLA-B*15:38,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWH\nHLA-B*15:39,YYAMYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:40,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B*15:42,YYAMYREISTNTYESNLYWTYNLYTWAELAYTWY\nHLA-B*15:43,YYAMYREISTNTYEDTLYLRYDSYTWAEWAYLWY\nHLA-B*15:44,YYAMYRNICTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B*15:45,YYAMYREISTNTYESNLYLSYDYYTWAEWAYLWY\nHLA-B*15:46,YYAKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:47,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*15:48,YYAMYREISTNTYESNLYLRYNYYTWAVLTYLWY\nHLA-B*15:49,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*15:50,YYAMYREISTNTYESNLYLRYDSYTWAEWAYTWY\nHLA-B*15:51,YYSEYRNICTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B*15:52,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*15:53,YYTKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:54,YYSEYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:55,YYAMYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*15:56,YYAMYREIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:57,YYAMYREISTNTYVNNLYLRYDSYTWAEWAYLWY\nHLA-B*15:58,YYAMYREISTNTYESNLYLRYNFYTWAEWAYLWY\nHLA-B*15:60,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:61,YYSEYREISTNTDESNLYLRYDSYTWAELAYLWY\nHLA-B*15:62,YYSEYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:63,YYAMYREISTNTYESNLYLRYDYYTWAEWAYLWY\nHLA-B*15:64,YYSEYRNISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:65,YYAMYREISTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B*15:66,YYAMYREICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:67,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:68,YYSEYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*15:69,YYSEYREISTNTYESNLYLRYDSYTWAELTYTWY\nHLA-B*15:70,YYAMYREISTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:71,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:72,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:73,YYAMYREISTNTYESNLYLRYNLYTWAEWAYLWY\nHLA-B*15:74,YYSEYREISINTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:75,YYAMYREISTNTYESNLYLRYDSYTWAQWAYLWY\nHLA-B*15:76,YYAMYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:77,YYAMYREISTNTYESNLYIR
YDDYTWAEWAYLWY\nHLA-B*15:78,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:80,YYSEYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:81,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:82,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:83,YYAMYREISTNTYESNLYWTYNYYTWAVDAYTWY\nHLA-B*15:84,YYAMYREISTNTYESNLYLRFDSYTWAVRAYLWY\nHLA-B*15:85,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:86,YYAMYREISTNTYESNLYLRYNLYTWAVLAYTWY\nHLA-B*15:87,YYAMYREISTNTYESIAYLRYDSYTWAEWAYLWY\nHLA-B*15:88,YYAMYRNISTNTYESNLYIRYDSYTWATLAYLWY\nHLA-B*15:89,YYAMYRNISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B*15:90,YYSEYRNICTNTYESNLYLRYDYYTWAELVYLWY\nHLA-B*15:91,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLSY\nHLA-B*15:92,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:93,YYSEYRNICTNTYESNLYLRYDSYTWAELAYTWY\nHLA-B*15:95,YYAMYQENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:96,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:97,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:98,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:99,YYSEYRNICTNTYESNLYLRYDYYTWAERAYLWY\nHLA-B*15:101,YYAMYREIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:102,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:103,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:104,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:105,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:106,YYAKYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:107,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:108,YYSEYRNICTNTYESNLYLRYDSYTWAELTYLWY\nHLA-B*15:109,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B*15:110,YYAMYREISTNTYESNLYLRCDSYTWAEWAYLWY\nHLA-B*15:112,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:113,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:114,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*15:115,YYSEYRNICTNTYESTAYLRYDSYTWAELAYLWY\nHLA-B*15:116,YYAMYREISTNTYESNLYLRYDSYSLAEWAYLWY\nHLA-B*15:117,YYAMYREISTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B*15:118,YYAMYREISTNTYESNLYLMYDSYTWAEWAYLWY\nHLA-B*15:119,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:120,YYAMYRDISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:121,YYAMYRNISTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B*15:122,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:123,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:124,YYSEYRNICTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*15:125,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:126,YYAMYREISTNTYESNLYLSYDSCTWAEWAYLWY\nHLA-B*15:127,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:128,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:129,YYAMYREISTNTYESNLYLNYDSYTWAEWAYLWY\nHLA-B*15:131,YYSEYREISTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B*15:132,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:133,YYSEYRNICTNTYESNLYLRYDFYTWAELAYLWY\nHLA-B*15:134,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:135,YYAMYREISTNTYENNLYLRYDSYTWAEWAYLWY\nHLA-B*15:136,YYAMYREISTNTYESNLYLRYDSYTWAVLTYLWY\nHLA-B*15:137,YYAMYREISTNTYESNLYWTYNFYTWAEWAYLWY\nHLA-B*15:138,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B*15:139,YYAMYRNISANTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:140,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:141,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B*15:142,YDAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:143,YYAKYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:144,YYAMYRNISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:145,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:146,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:147,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:148,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:150,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*15:151,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:152,YYAMYREIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:153,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:154,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:155,YYAMYREISTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B*15:156,YYSEYREISTNTYESNLY
LRYDSYTWAELAYLWY\nHLA-B*15:157,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B*15:158,YYSEYREISTNTYESNLFLRYDSYTWAELAYLWY\nHLA-B*15:159,YYAMYREISTNTYESNLHLRYDSYTWAEWAYLWY\nHLA-B*15:160,YYAMHREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:161,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B*15:162,YYAMYRENMASTYENIAYLRYHDYTWAALAYLWY\nHLA-B*15:163,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:164,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:165,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:166,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:167,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:168,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:169,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:170,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:171,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:172,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:173,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:174,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:175,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:176,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:177,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:178,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:179,YYAMYREISTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B*15:180,YDSEYRNIFTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:183,YYTMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:184,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:185,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B*15:186,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B*15:187,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:188,YYAMYREISTNTYESNLYLRYNYYTWAVLAYTWY\nHLA-B*15:189,YYAMYRNICTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:191,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:192,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:193,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:194,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:195,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B*15:196,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:197,YYSEYRNICTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B*15:198,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:199,YYAMYREISTNTYESNLYLRYDSYTWAEDAYTWY\nHLA-B*15:200,YYSEYRNICTNTYESNLYLRYDSYTWATLAYLWY\nHLA-B*15:201,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:202,YYATYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*18:01,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:02,YHSTYRNISTNTYESNLYLNYDSYTWAVLAYTWH\nHLA-B*18:03,YHSTYRNISTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:04,YYATYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:05,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:06,YHSTYRNISTNTYVSNLYLRYDSYTWAVLAYTWH\nHLA-B*18:07,YHSTYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:08,YHSTYRNISTNTYESNLYLRCDSYTWAVLAYTWH\nHLA-B*18:09,YHSTYRNISTNTYENTAYLRYDSYTWAVLAYTWH\nHLA-B*18:10,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*18:11,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWY\nHLA-B*18:12,YHSTYREISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:13,YHSTYRNISTNTYESNLYLRYDSYTWAVRAYTWH\nHLA-B*18:14,YHSTYRNISTNTYESNLYLSYDSYTWAVLAYTWH\nHLA-B*18:15,YHSTYRNISTNTYESNLYLRYDSYTWAELAYTWH\nHLA-B*18:18,YHSTYRNISTNTYESNLYLRSDSYTWAVLAYTWH\nHLA-B*18:19,YHSTYRNISTNTYESNLYLRYDSYTWAEWAYTWH\nHLA-B*18:20,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:21,YHSTYRNISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*18:22,YHSTYRNISTNTYESNLYISYDSYTWAVLAYTWH\nHLA-B*18:24,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:25,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:26,YHSTYRNISTNTYESNLYLRYNYYTWAVLAYTWH\nHLA-B*18:27,YHSTYRNISTNTYESNLYLMFDSYTWAVLAYTWH\nHLA-B*18:28,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:29,YHATYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:30,YHSTYRNISTNTYESNLYLRYDSYTWAERAYTWH\nHLA-B*18:31,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:32,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:33,YHSTYRNICTNTYESNLYLRYDSYTW
AVLAYTWH\nHLA-B*18:34,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:35,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*18:36,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B*18:37,YHSEYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:38,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:39,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:40,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:41,YHSTYRNISTNTYESNLYLRYESYTWAVLAYTWH\nHLA-B*18:42,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:43,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:44,YHSTYRNISTNTYESNLYLWYDSYTWAVLAYTWH\nHLA-B*18:45,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:46,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:47,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:48,YHSKYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:49,YHSTYRNISTNTYENNLYLRYDSYTWAVLAYTWH\nHLA-B*18:50,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWH\nHLA-B*27:01,YHTEYREICAKTYENTAYLNYHDYTWAVLAYEWY\nHLA-B*27:02,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B*27:03,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:04,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*27:05,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:06,YHTEYREICAKTDESTLYLNYDYYTWAELAYEWY\nHLA-B*27:07,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B*27:08,YHTEYREICAKTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:09,YHTEYREICAKTDEDTLYLNYHHYTWAVLAYEWY\nHLA-B*27:10,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B*27:11,YHTEYREICAKTDESTLYLSYNYYTWAVLAYEWY\nHLA-B*27:12,YHTEYREICTNTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:13,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:14,YHTEYREICAKTDEDTLYWTYHDYTWAVLAYEWY\nHLA-B*27:15,YHTEYREICAKTDESTLYLNYHDYTWAELAYTWY\nHLA-B*27:16,YHTEYREICTNTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:17,YHTEFREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:18,YHTEYREISTNTYESNLYLNYHDYTWAELAYEWY\nHLA-B*27:19,YHTEYREICAKTDEDTLYIRYHDYTWAVLAYEWY\nHLA-B*27:20,YHTEYREICAKTDESTLYLNYNYYTWAELAYEWY\nHLA-B*27:21,YHTEYREICAKTDESTLYLRYDYYTWAELAYEWY\nHLA-B*27:23,YHTEYRNIFTNTYESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:24,YHTEYREICAKTDESTLYLSYNYYSWAELAYEWY\nHLA-B*27:25,YHTEYREICAKTDESTLYLNYHDYTWAEWAYLWY\nHLA-B*27:26,YHTEYREICAQTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:27,YHTEYREICAKTDEDTLYLNYNYYTWAVLAYEWY\nHLA-B*27:28,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYTWH\nHLA-B*27:29,YHTEYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:30,YHTEYREICAKTDENIAYIRYHDYTWAVLAYEWY\nHLA-B*27:31,YHTEYREICAQTDESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:32,YHTEYREICAKTDEDTLYLSYHDYTWAVLAYEWY\nHLA-B*27:33,YHTEYREICAKTDESNLYLSYNYYTWAVLAYEWY\nHLA-B*27:34,YHTEYREICAKTDEDTLYLSYDYYTWAVLAYEWY\nHLA-B*27:35,YHTEYREICAKTDEDTLYLNYNFYTWAVLAYEWY\nHLA-B*27:36,YHTEYREICAKTDESTLYLNYHDYSLAVLAYEWY\nHLA-B*27:37,YYTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:38,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYLWY\nHLA-B*27:39,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:40,YHTEYREICAKTDESNLYLNYHDYTWAELAYEWY\nHLA-B*27:41,YHTEYREICAKTDEDTLYLNYDSYTWAVLAYEWY\nHLA-B*27:42,YHTEYREICAKTDEDNLYLNYHDYTWAVLAYEWY\nHLA-B*27:43,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B*27:44,YHTEYREICAKTYESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:45,YHTEYREICAKTDEDTLYLNYHDYTWAVRAYEWY\nHLA-B*27:46,YHTEYREICAKTDEDTLYLNYHYYTWAVLAYEWY\nHLA-B*27:47,YHTEYREICAKTDEDTLYLNYHDYTWAVDAYLSY\nHLA-B*27:48,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:49,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:50,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYESY\nHLA-B*27:51,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:52,YHTTYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:53,YHTEYREICAKTDEDIAYLNYHDYTWAVLAYEWY\nHLA-B*27:54,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B*27:55,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:56,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:57,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B*27:58,YHTEYREICAKTDEDTLYLNYHDYTWAVL
AYEWY\nHLA-B*27:60,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:61,YHTEYREICAKTDESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:62,YHTEYREICAKTDENIAYLNYHDYTWAVLAYTWH\nHLA-B*27:63,YHTEYREICAKTDESTLYLNYHDYTWAELAYLWY\nHLA-B*27:67,YHTMYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:68,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*27:69,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*35:01,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:02,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:03,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:04,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:05,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:06,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B*35:07,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:08,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:09,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:10,YYATYREIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:11,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*35:12,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:13,YYATYREIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:14,YYATYRNIFTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*35:15,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYEWY\nHLA-B*35:16,YYATYREIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:17,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:18,YYATYRNIFTNTYESNLYIRYNYYTWAVRAYLWY\nHLA-B*35:19,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:20,YYATYRNISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:21,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWH\nHLA-B*35:22,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYLWY\nHLA-B*35:23,YYATYRNIFTNTYESNLYIRFDSYTWAVLAYLWY\nHLA-B*35:24,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWH\nHLA-B*35:25,YYSEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:26,YYAEYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:27,YYATYRNIFTNTYENNLYIRYDSYTWAVLAYLWY\nHLA-B*35:28,YYATYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:29,YYATYRNIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:30,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:31,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*35:32,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*35:33,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYEWY\nHLA-B*35:34,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B*35:35,YYATYRNIFTNTYESNLYIRYDSYTWAVLTYTWY\nHLA-B*35:36,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:37,YYATYRNIFTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B*35:38,YYATYRNIFTNTYESNLYIRYDFYTWAVDAYLWY\nHLA-B*35:39,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B*35:41,YYATYRNIFTNTYESNLYIRYDSCTWAVLAYLWY\nHLA-B*35:42,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:43,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:44,YYATYRNIFTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*35:45,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLSY\nHLA-B*35:46,YYAMYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:47,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:48,YYATYRNIFTNTYESNLYIRSDSYTWAVLAYLWY\nHLA-B*35:49,YYAEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:50,YHATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:51,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:52,YYATYRNIFTNTYECNLYIRYDSYTWAVLAYLWY\nHLA-B*35:54,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:55,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:56,YYATYRNIFTNTYENNLYIRYDFYTWAVLAYLWY\nHLA-B*35:57,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:58,YYATYRNIFTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B*35:59,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B*35:60,YYATYRNIFTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*35:61,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:62,YYATYRNIFTNTYESNLYIRYDSYTWAVWAYLWY\nHLA-B*35:63,YHTKYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:64,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:66,YYATYRNIFTNTYESNLYLSYDSYTWAVRAYEWY\nHLA-B*35:67,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:68,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*35:69,YYATYREIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:70,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYL
WY\nHLA-B*35:71,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLSY\nHLA-B*35:72,YYATYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:74,YYATYRNIFTNTYVSNLYIRYDFYTWAVLAYLWY\nHLA-B*35:75,YYATYRNIFTNTYESNLYLRYDFYTWAVLAYLWY\nHLA-B*35:76,YYATYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:77,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:78,YYATYRNIFANTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:79,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:80,YYATYREIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:81,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWH\nHLA-B*35:82,YYATYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:83,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:84,YHTTYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:85,YYATYRNICTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:86,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B*35:87,YYATYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*35:88,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*35:89,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:90,YYTTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:91,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:92,YYATYRNIFTNAYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:93,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYTWY\nHLA-B*35:94,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:95,YYATYRNISTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:96,YYATYRNIFTNTYESNLYIRYDFYTWAELAYTWH\nHLA-B*35:97,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:98,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:99,YYATYRNIFTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B*35:100,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:101,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:102,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:103,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:104,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:105,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:106,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:107,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:108,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:109,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWH\nHLA-B*35:110,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:111,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:112,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:113,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:114,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:115,YYATYRNIFTNTYESNLYIRYDSYTWAVDAYLWY\nHLA-B*35:116,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:117,YYATYRNIFTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*35:118,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:119,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:120,YYATYRNIFTNTYESNLYIRHDSYTWAVLAYLWY\nHLA-B*35:121,YYATYRNIFTNTYESNLYIRYHSYTWAVLAYLWY\nHLA-B*35:122,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:123,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:124,YYSTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:125,YYSTYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:126,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:127,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:128,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:131,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:132,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:133,YYATYRNIFTNTYESNLYIRYVSYTWAVLAYLWY\nHLA-B*35:135,YYATYRNICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:136,YYATYRNIFTNTDESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:137,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:138,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:139,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:140,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:141,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:142,YYSTYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:143,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:144,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*37:01,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:02,YHSTYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*37:04,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWH\nHLA-B
*37:05,YHSTYREISTNTYEDNLYIRSNFYTWAVDAYTWY\nHLA-B*37:06,YHSKYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:07,YHSTYREISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B*37:08,YHSTYRNISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:09,YHSTYREISTNTYEDTLYLSYDYYTWAVDAYTWY\nHLA-B*37:10,YHSTYREISTNTYENTAYIRSNFYTWAVDAYTWY\nHLA-B*37:11,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B*37:12,YHSTYREISTNTYEDTLYIRYNYYTWAVDAYTWY\nHLA-B*37:13,YHSTYREISTNTYEDTLYIRSNFYTWAEDAYTWY\nHLA-B*37:14,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B*37:15,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:17,YHSTYREISTNTYEDTLYIRSNFYTWTVDAYTWY\nHLA-B*37:18,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:19,YHSTYREISTNTYEDTLYIRYNFYTWAVDAYTWY\nHLA-B*37:20,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:21,YHSTYREIFTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:22,YHATYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:23,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*38:01,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:02,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:03,YYSEYREISTNTDESTAYLRYNFYTWAVLTYTWY\nHLA-B*38:04,YYSEYREICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:05,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:06,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:07,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:08,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYMWY\nHLA-B*38:09,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:10,YYSEYRNICTNTYENIAYLRYNFYTWAELAYTWY\nHLA-B*38:11,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:12,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:13,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:14,YYSEYRNICTNTDENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:15,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:16,YYTEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:17,YYSEYRNICTNTYEDTLYLRYNFYTWAVLTYTWY\nHLA-B*38:18,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:19,YYSEYRNICTNTYENIAYLSYNFYTWAVLTYTWY\nHLA-B*38:20,YYSEYRNICTNTYENIAYIRYNFYTWAVLTYTWY\nHLA-B*38:21,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:22,YYSEYRNICTNTYENIAYLNYNFYTWAVLTYKWY\nHLA-B*38:23,YYSEYRNICTNTYENTAYFRYNFYTWAVLTYTWY\nHLA-B*39:01,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:02,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:03,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:04,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:05,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:06,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B*39:07,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B*39:08,YYSEYREISTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B*39:09,YYSEYRNICTNTDESNLYLRSNFYTWAVLTYTWY\nHLA-B*39:10,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:11,YYSEYRNICTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B*39:12,YDSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:13,YYSEYREISTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:14,YYSEYRNICTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B*39:15,YYSEYRNICTNTDESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:16,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:17,YYSEYRNIYTNTDESNLYLRYNLYTWAVLTYTWY\nHLA-B*39:18,YYSEYRNICTNTDESNLYLRYNFYTWAEWTYTWY\nHLA-B*39:19,YYSTYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:20,YYSEYRNIYTNTYENNLYLRYNFYTWAVLTYTWY\nHLA-B*39:22,YYSEYREICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:23,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:24,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:26,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:27,YYSEYRNICTNTDVSNLYLRYNFYTWAVLTYTWY\nHLA-B*39:28,YYSEYRNICTNTDESNLYLTYNFYTWAVLTYTWY\nHLA-B*39:29,YYSEYRNICTNTDESNLYLSYDYYTWAVLTYTWY\nHLA-B*39:30,YYSEYRNICTNTDESNLYLRYNFYTWAVLAYTWY\nHLA-B*39:31,YHSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:32,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYLWH\nHLA-B*39:33,YYSEYRNICTNTDESNLYWTYNFYTWAVRAYLWY\nHLA-B*39:34,YYSEYRNICTNTDESNLYWTYNFYTWAVLAYTWY\nHLA-B*39:35,YYSEYRNICTNTDESNLYLRYNFYTWAELTYTWY\nHLA-B*39
:36,YYSEYRNICTNTDESNLYLRYNFYTWAEWAYTWY\nHLA-B*39:37,YYSEYRNICTNTYESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:39,YYSEYRNISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:41,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:42,YYSEYRNICTNTDESNLYIRYNFYTWAVLTYTWY\nHLA-B*39:43,YYSEYRNICTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B*39:44,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:45,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:46,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:47,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYEWY\nHLA-B*39:48,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWH\nHLA-B*39:49,YYSEYREISTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:50,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYEWY\nHLA-B*39:51,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:52,YYSEYRNICTDTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:53,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:54,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:55,YYSEYRNICTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:56,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:57,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B*39:58,YYSEYRNIFTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:59,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:60,YYSEYRNICTNTDESNLYLRYNFYTWAALTYTWY\nHLA-B*40:01,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:02,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:03,YHTKYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B*40:04,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:05,YHTKYREISTNTYESNLYLSYNYYTWAELAYLWY\nHLA-B*40:06,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:07,YHTKYREIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:08,YHTKYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:09,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:10,YYAKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:11,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:12,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:13,YHTKYREIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*40:14,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:15,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*40:16,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*40:18,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:19,YHTKYREISTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*40:20,YHTKYREISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*40:21,YYAMYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:23,YHTKYREISTNTYESNLYLRYNYYSWAERAYEWY\nHLA-B*40:24,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:25,YHTKYRNISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:26,YHTKYREISTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B*40:27,YHTKYREISTNTYESNLYLSYNNYTWAVLAYEWY\nHLA-B*40:28,YHTKYREISTNTYESNLYIRYNYYTWAELAYLWH\nHLA-B*40:29,YHTKYPEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:30,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B*40:31,YHTKYREISTNTYESNLYLSYDYYSLAVLAYEWY\nHLA-B*40:32,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*40:33,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B*40:34,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B*40:35,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:36,YHTKYREISTNTYESNLYLRYNFYSLAVLAYEWY\nHLA-B*40:37,YHTKYREISTNTYENNLYLSYNYYTWAVLAYEWY\nHLA-B*40:38,YHTKYREISTNTYESNLYLRYNSYSLAVLAYEWY\nHLA-B*40:39,YHTKYREISTNTYESNLYLSYNYYTWAVLAYTWY\nHLA-B*40:40,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:42,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B*40:43,YHTKYREISTNTDESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:44,YHTKYREISTNTYESNLYWTYDYYTWAVLAYEWY\nHLA-B*40:45,YHTKYREISTNTYESNLYLSYNYYSWAVLAYEWY\nHLA-B*40:46,YHTEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:47,YHTKYREISTNTYENTAYLRYNYYSLAVLAYEWY\nHLA-B*40:48,YHTKYREISTNTYESNLYLRYNLYSLAVLAYEWY\nHLA-B*40:49,YYTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:50,YHTKYREISTNTDESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:51,YHTKYREISTNTYESNLYLRYNYYSWAELAYTWH\nHLA-B*40:52,YHTKYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*40:53,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:54,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:55
,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:56,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:57,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:58,YYAKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:59,YHTKYREISTNTYESNLYIRYDSYSLAVLAYEWY\nHLA-B*40:60,YHTKYREISTNTYESNLYLRSDSYSLAVLAYEWY\nHLA-B*40:61,YHTKYREIYTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:62,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:63,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B*40:64,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:65,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:66,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:67,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:68,YHTKYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:69,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:70,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:71,YHTKYREISTNTYESNLYLSYNLYTWAVLAYLWY\nHLA-B*40:72,YHTKYREISTNTYESNLYLRYNYYTLAVLAYEWY\nHLA-B*40:73,YHTKYREISTNTYVSNLYLRYNYYSLAVLAYEWY\nHLA-B*40:74,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:75,YHTKYREICTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:76,YHTKYREISTNTYESKLYLRYNYYSLAVLAYEWY\nHLA-B*40:77,YHTKYREISTNTYESNLYLRYNFYTLAVLAYEWY\nHLA-B*40:78,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:79,YHTKYREISTNTYESNLHLRYNYYSLAVLAYEWY\nHLA-B*40:80,YHTKYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*40:81,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:82,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:83,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:84,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:85,YHTKYREISTNTYESNLYLSYNYYIWAVLAYEWY\nHLA-B*40:86,YHTKYREISTNTYESNLYWTYNFYTWAVLAYEWY\nHLA-B*40:87,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:88,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:89,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:90,YHTKYREISTNTYESNLYLSYNYYTWAVLAHEWY\nHLA-B*40:91,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:92,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B*40:93,YHTEYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:94,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:95,YHTKYREISTNTYESNLYWTYNYYTWAELAYEWY\nHLA-B*40:96,YHTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B*40:97,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:98,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*40:99,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:100,YHTKYREISTNTYESNLYLRFNYYSLAVLAYEWY\nHLA-B*40:101,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:102,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:103,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:104,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:105,YHTKYREISTNTYESNLYLSYNSYTWAVLAYEWY\nHLA-B*40:106,YHTKYRNIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:107,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:108,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:109,YHTKYREISTNTYESIAYWTYNYYTWAVLAYEWY\nHLA-B*40:110,YYTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B*40:111,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:112,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:113,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWH\nHLA-B*40:114,YHTKYREISTNTYESNLYLRYNYYSWAVLAYEWY\nHLA-B*40:115,YHTKYWEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:116,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:117,YHTKYREISTNTYENIAYLRYNYYSLAVLAYEWY\nHLA-B*40:119,YHTKYREISTNTYDSNLYLSYNYYTWAVLAYEWY\nHLA-B*40:120,YHTKYREISTNTYESNLYIRYDYYTWAVLAYEWY\nHLA-B*40:121,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:122,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:123,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:124,YHTKYREISTNTYESNLYLRYHDYSLAVLAYEWY\nHLA-B*40:125,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:126,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:127,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:128,YHTKYREISTNTYESNLYLRYNYYSLAVRAYEWY\nHLA-B*40:129,YYTKYREISTNTYESNLYIRY
NYYTWAVLAYEWY\nHLA-B*40:130,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:131,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:132,YHTKYREISTNTYESNLYLRYNYYSLAVLAYESY\nHLA-B*40:134,YHTKYREISTNIYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:135,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:136,YHTKYREISTNTYESNLYLRYNYYTWAVDAYEWY\nHLA-B*40:137,YYAMYREISTNTYESNLYIRYNYYSLAVLAYEWY\nHLA-B*40:138,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:139,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:140,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:141,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:143,YHTKYREISTNTYESNLYLSFNYYTWAVLAYEWY\nHLA-B*40:145,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:146,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:147,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*41:01,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:02,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:03,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B*41:04,YHTKYREISTNTYESNLYLSYDYYTWAVDAYTWY\nHLA-B*41:05,YHTKYREISTNTYESKLYWRYNYYTWAVDAYTWY\nHLA-B*41:06,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:07,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:08,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B*41:09,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:10,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:11,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:12,YHAKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*42:01,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:02,YHSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:04,YYSEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*42:05,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:06,YYSEYRNIYAQTDESNLYLSYNFYTWAVDAYTWY\nHLA-B*42:07,YYSEYRNIYTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:08,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:09,YHSEYRNIYAQTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*42:10,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:11,YYSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:12,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:13,YYSEYRNIYAQTDESNLYIRYNYYTWAVDAYTWY\nHLA-B*42:14,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*44:02,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:03,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:04,YYTKYREISTNTYENTAYIRYDDYTWAVRAYTSY\nHLA-B*44:05,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:06,YYATYRNIFTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B*44:07,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:08,YYTMYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:09,YYTKYREISTNTYESNLYIRYDDYTWAVDAYLSY\nHLA-B*44:10,YYTKYREISTNTYENTAYIRFNLYTWAVLAYLSY\nHLA-B*44:11,YYTKYREISTNTYENTPYIRYDDYTWAVDAYLSY\nHLA-B*44:12,YYTKYRNISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:13,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:14,YYTKYREISTNTYENTAYIRYNDYTWAVDAYLSY\nHLA-B*44:15,YHTKYREISTNTYESTAYWRYNLYTWAVDAYLSY\nHLA-B*44:16,YYTKYREISTNTYENTAYIRYDDYTWAVDAYEWY\nHLA-B*44:17,YYTKYREISTNTYENTAYIRYDSYTWAVDAYLSY\nHLA-B*44:18,YHTKYREISTNTYENIAYWRYNLYTWAVDAYLSY\nHLA-B*44:20,YYTKYREISTNTYENTAYWTYDDYTWAVDAYLSY\nHLA-B*44:21,YYTKYREISTNTYENTAYIRYDDYTWAVDAYESY\nHLA-B*44:22,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:24,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:25,YYTKYREISTNTYENIAYIRYDYYTWAVDAYLSY\nHLA-B*44:26,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:27,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:28,YYTKYREISTNTYENTAYIRYDDYTWAVRAYLSY\nHLA-B*44:29,YYTKYREISTNTYENTAYIRYDDYTWAVLTYLSY\nHLA-B*44:30,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:31,YYTKYREISTNTYENTAYLRYNYYSLAVLAYESY\nHLA-B*44:32,YYTKYPEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:33,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:34,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:35,YYTKYREISTNTYENTAYIRYDDYTWAVEAYLSY\nHLA-B*44:36,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:37,YYTKYREIS
TNTYENTAYIRYDDYTWAVLAYLWY\nHLA-B*44:38,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:39,YYPKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:40,YYTEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:41,YYTKYREISTNTYENTAYLRYDDYTWAVDAYLSY\nHLA-B*44:42,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLWY\nHLA-B*44:43,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLSY\nHLA-B*44:44,YYTEYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:45,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:46,YYTKYREISTNTYESNLYIRYDDYTWAVLAYLSY\nHLA-B*44:47,YYTKYREISTNTYENTAYWTYDDYTWAVLAYLSY\nHLA-B*44:48,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:49,YDTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:50,YYTKYREISTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B*44:51,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:53,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:54,YYTKYREISTNTYENTAYLSYDDYTWAVLAYLSY\nHLA-B*44:55,YHTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:57,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:59,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:60,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:62,YYTKYREISTNTYENTAYIRYNYYTWAVDAYLSY\nHLA-B*44:63,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:64,YYTKYREISTNTYENTAYIRYDDYTWAVLAYEWY\nHLA-B*44:65,YYTKYREISTNTYENTAYLRYDDYTWAVLAYLSY\nHLA-B*44:66,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:67,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:68,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:69,YYTKYWEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:70,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:71,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:72,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:73,YYTKYREISTNTYENTAYIRYDDYTWAVDGYLSY\nHLA-B*44:74,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:75,YYTKYREISTNTYENNLYIRYDYYTWAVDAYLSY\nHLA-B*44:76,YYTKYREISTNTYENTAYIRYDDYTWAERAYLSY\nHLA-B*44:77,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B*44:78,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:79,YYTKYREISTNTYENTAYIRYDDYTWAELAYLSY\nHLA-B*44:80,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:81,YYTNYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:82,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B*44:83,YYATYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:84,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:85,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:86,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:87,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:88,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:89,YYTKYREISTNTYENTAYIRYDDYTWAVDTYLSY\nHLA-B*44:90,YYTKYREIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-B*44:91,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLWY\nHLA-B*44:92,YYTKYREISTNTYENTAYIRYDDYTWAMLAYLSY\nHLA-B*44:93,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:94,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:95,YYTKYREISTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B*44:96,YYTKYREISTNTYENTAYIGYDDYTWAVLAYLSY\nHLA-B*44:97,YYTKYREICAKTDENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:98,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:99,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:100,YYTKYREISTNTYENTAYWRYDDYTWAVDAYLSY\nHLA-B*44:101,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:102,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:103,YHTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:104,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:105,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:106,YYTKYREISTNTYENTAYLSYDDYTWAVDAYLSY\nHLA-B*44:107,YYTKYREISTNTYENTAYIRYDYYTWAVLAYLSY\nHLA-B*44:109,YYTKYREISTNTYESTAYIRYDDYTWAVLAYLSY\nHLA-B*44:110,YYTKYREISTNTYENTAYISYDDYTWAVLAYLSY\nHLA-B*45:01,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:02,YHTKYREISTNTYESNLYWRYNFYTWAVDAYLSY\nHLA-B*45:03,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:04,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLWY\nHLA-B*45:05,YHTKYREISTNTYESNLYWRYNLYTWAVDVYLSY\nHLA-B*45:06,YH
TKYREIYAQTDESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:07,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:08,YHTKYREISTNTYESNLYWRYNLYTWAVDAYTWY\nHLA-B*45:09,YHTKYREISTNTYESNLYWRYDSYTWAVDAYLSY\nHLA-B*45:10,YHTKYREISTNTYESNLYWRYNLYTWAVDAYEWY\nHLA-B*45:11,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:12,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*46:01,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:02,YYAMYREKYRQTGVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:03,YYAMYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-B*46:04,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:05,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:06,YYAMYREKYRQTDVSNLYLRYDSYSLAVLAYEWY\nHLA-B*46:08,YYAMYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-B*46:09,YYAMYREKYRQTDVSNLYLRYDSYTWAVWAYLWY\nHLA-B*46:10,YYTMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:11,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYLWY\nHLA-B*46:12,YYAMYREKYRQTDVSNLYLSYDSYTWAEWAYLWY\nHLA-B*46:13,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-B*46:14,YHAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:16,YYAMYREKFRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:17,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLSY\nHLA-B*46:18,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYTWY\nHLA-B*46:19,YYAMYREKYRQTDVSNLYLRYDSYTWAVLTYLWY\nHLA-B*46:20,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:21,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYTWY\nHLA-B*46:22,YYAMYREKYRRTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:23,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:24,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*47:01,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*47:02,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B*47:03,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B*47:04,YYTKYREISTNTYENTAYLNYHDYTWAVLAYEWY\nHLA-B*47:05,YYTKYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*47:06,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*47:07,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*48:01,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:02,YYSEYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*48:03,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*48:04,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:05,YYSEYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*48:06,YYSEYRNIFTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:07,YYSEYREISTNTYESNLYLSYNFYSLAVLAYEWY\nHLA-B*48:08,YYSEYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*48:09,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:10,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:11,YYSEYREISTNTYESNLYLSYNYYSLAVLAYELY\nHLA-B*48:12,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:13,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:14,YYSEYREISTNTYESNLYLSYNSYTLAVLAYEWY\nHLA-B*48:15,YYSEYREISTNTYESNLYLSYNYYSLAELAYEWY\nHLA-B*48:16,YYSEYRVISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:17,YYSEYREISTNTYESNLYIRYNFYSLAVLAYEWY\nHLA-B*48:18,YYSEYREISTNTYESIAYLSYNYYSLAVLAYEWY\nHLA-B*48:19,YYSEYREISTNTYESNLYLSYNYYSLAVWAYEWY\nHLA-B*48:20,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:21,YYSEYREISTNTYESNLYLNYNYYSLAVLAYEWY\nHLA-B*48:22,YHSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:23,YYSEYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*49:01,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:02,YHTKYREISTNTYENTAYWRYNLYTWAELAYLWY\nHLA-B*49:03,YHATYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:04,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B*49:05,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B*49:06,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:07,YHTKYREISTNTYENIAYWRYNLYTWAELAYEWY\nHLA-B*49:08,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:09,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWH\nHLA-B*49:10,YYTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*50:01,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:02,YHTKYREISTNTYESNLYWRYNLYTWAELAYLSY\nHLA-B*50:04,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:05,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:06,YHTRYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:07,YHTKY
REISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:08,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:09,YHTKYREISTNTYESNLYWRYNFYTWAELAYLWY\nHLA-B*51:01,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:02,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B*51:03,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLGH\nHLA-B*51:04,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B*51:05,YYATYRNIFTNTYENIAYWTYNYYTWAVRAYLWY\nHLA-B*51:06,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWH\nHLA-B*51:07,YYATYRNISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:08,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B*51:09,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*51:12,YYATYRNIFTNTYENIADWTYNYYTWAELAYLWH\nHLA-B*51:13,YYATYRNIFTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B*51:14,YYATYRNIFTNTYENIAYWTYKYYTWAELAYLWH\nHLA-B*51:15,YYATYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B*51:16,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B*51:17,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:18,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:19,YYATYRNIFTNTYENIAYWTYNYYTWAVLTYLWH\nHLA-B*51:20,YYATYRNIFTNTDENIAYWTYNYYTWAVDAYLWH\nHLA-B*51:21,YYATYRNIFTNTYENIAYWTYNYYTWAELAYTWH\nHLA-B*51:22,YYATYRNICTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:23,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLSY\nHLA-B*51:24,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:26,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:28,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:29,YYATYRNIFTNTYENIAYWTYNYYTWAERAYLWH\nHLA-B*51:30,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:31,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWH\nHLA-B*51:32,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:33,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:34,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWY\nHLA-B*51:35,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:36,YYATYRNIFTNTYENIAYWTYNYYTWAEDAYTWY\nHLA-B*51:37,YYATYRNIFTNTYENIAYWTYDSYTWAELAYLWH\nHLA-B*51:38,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:39,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:40,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B*51:42,YYATYRNIFTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B*51:43,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:45,YYATYRNIFTNTYENIAYLRYDSYTWAELAYLWH\nHLA-B*51:46,YYATYRNIFTNTYENIAYITYNYYTWAELAYLWH\nHLA-B*51:48,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:49,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:50,YYATYRNIFTNTYENGLYWTYNYYTWAELAYLWH\nHLA-B*51:51,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:52,YYATYRNIFTNTHENIAYWTYNYYTWAELAYLWH\nHLA-B*51:53,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:54,YYATYRNIFTNTYENTAYWTYNYYTWAVRAYLWY\nHLA-B*51:55,YYATYRNIFTNTYENIAYWTYNYYTWAEQAYLWH\nHLA-B*51:56,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B*51:57,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:58,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:59,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWY\nHLA-B*51:60,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:61,YYATYRNIFTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B*51:62,YYATYRNIFTNTYENIAYLRYNLYTWAELAYLWH\nHLA-B*51:63,YYATYRNIFTNTYENIAYWTYNSYTWAELAYLWH\nHLA-B*51:64,YYATYRNIFTNTYENIAYLSYNYYTWAELAYLWH\nHLA-B*51:65,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:66,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:67,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:68,YDATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:69,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:70,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:71,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:72,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:73,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*51:74,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:75,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:76,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:77,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:78,YYATYRNIFTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B*51:79,YYATYRNI
FTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:80,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:81,YYATYRNIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*51:82,YYATYRNIFTNTYENIAYWTYNYYTWAERAYEWH\nHLA-B*51:83,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:84,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:85,YYATYRNIFTNTYENIAYWTYHDYTWAELAYLWH\nHLA-B*51:86,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:87,YYATYRNIFTNTYENIAYWTYNYYTWADLAYLWH\nHLA-B*51:88,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:89,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:90,YYATYRNIFTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B*51:91,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:92,YYATYRNIFTNTYENIAYWTYDFYTWAELAYLWH\nHLA-B*51:93,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWY\nHLA-B*51:94,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:95,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:96,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:01,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:02,YYAMYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:03,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B*52:04,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:05,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:06,YYATYREIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:07,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:08,YYATYREISTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B*52:09,YYATYREISTNTYESIAYWTYNYYTWAELAYLWH\nHLA-B*52:10,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*52:11,YYATYREISTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B*52:12,YYATYREISTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B*52:13,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:14,YYATYREISTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B*52:15,YYATYREISTNTYENIAYWTYNYYTWAELAYLSH\nHLA-B*52:16,YYSEYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:17,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:18,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:19,YYATYREISTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B*52:20,YYATYREISTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B*52:21,YYATYREISTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B*53:01,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:02,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWH\nHLA-B*53:03,YYATYRNIFTNTYEDTLYIRYDSYTWAVLAYLWY\nHLA-B*53:04,YYATYRNIFTNTYENIAYIRYDFYTWAVLAYLWY\nHLA-B*53:05,YYATYRNIFTNTYESIAYIRYDSYTWAVLAYLWY\nHLA-B*53:06,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWH\nHLA-B*53:07,YYATYRNIFTNTYENIAYIRSNFYTWAVLAYLWY\nHLA-B*53:08,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B*53:09,YYATYRNISTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B*53:10,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:11,YYATYRNIFTNTYENTAYIRYDSYTWAVRAYLWY\nHLA-B*53:12,YYATYRNIFTNTYESTAYIRYDSYTWAVLAYLWY\nHLA-B*53:13,YYATYRNIFTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B*53:14,YYATYRNIFTNTYENIAYLSYDSYTWAVLAYLWY\nHLA-B*53:15,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:16,YYATYRNIFTNTYESIAYIRYDSYTWAVRAYLWY\nHLA-B*53:17,YYATYREISTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:18,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:19,YYATYRNIFTNTYENIAYIRYNYYTWAVLAYLWY\nHLA-B*53:20,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:21,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:22,YYATYRNIFTNTYENIAYIRYDSYTWAVDAYLSY\nHLA-B*53:23,YYATYRNIFTNTDENIAYIRYDSYTWAVLAYLWY\nHLA-B*54:01,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:02,YHAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:03,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*54:04,YYAGYRNIYAQTDESNLYWTYNYYTWAVLAYTWY\nHLA-B*54:06,YYAGYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*54:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:09,YYAGYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B*54:10,YYAGYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B*54:11,YYAGYRNIYAQTDESNLYWTYNYYSWAVLAYTWY\nHLA-B*54:12,YYAGYRNIYAQTDENIAYWTYNLYTWAVLAYTWY\nHLA-B*54:13,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:14,YYAGYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B*54:15,YYAGYRNIYAQ
TDESNLYWTYDYYTWAVLAYTWY\nHLA-B*54:16,YYAGYRNIYAQTDESNLYWTYDLYTWAVLAYTWY\nHLA-B*54:17,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:18,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:19,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:20,YYAGYRNIYAQTDESNLYWTYNLYTWAERAYTWY\nHLA-B*54:21,YYSGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:22,YYAGYRNIYAQTDESNLYWTYNLYSWAVLAYTWY\nHLA-B*54:23,YYAGYRNIYAQTEESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:01,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:02,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:03,YYAEYRNIYAQTDVSNLYWTYNLYTWAELAYTWY\nHLA-B*55:04,YYAEYRNIYAQTDESNLYLSYNYYTWAVLAYTWY\nHLA-B*55:05,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:08,YYAEYRNIYAQTDESNLYLRYNYYTWAVLAYLWY\nHLA-B*55:09,YYAEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B*55:10,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:11,YYAEYRNIYAQTDESNLYWMYNLYTWAELAYTWY\nHLA-B*55:12,YYAEYRNIYAQTDENNLYWTYNLYTWAVLAYTWY\nHLA-B*55:13,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYTWY\nHLA-B*55:14,YYAEYRNIYAQTDESNLYIVYDSYTWAELAYTWY\nHLA-B*55:15,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:16,YHAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:17,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B*55:18,YYAEYREISTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:19,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:20,YYAEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*55:21,YYAEYRNIYAQTDESNLYWTYNLYTWAEWAYTWY\nHLA-B*55:22,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYEWY\nHLA-B*55:23,YYAEYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B*55:24,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYEWY\nHLA-B*55:25,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:27,YYAEYRNIYAQTDESNLYWTYDYYTWAVLAYTWY\nHLA-B*55:28,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B*55:29,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:30,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:31,YYAEYRNIYAQTYESNLYWTYNLYTWAELAYTWY\nHLA-B*55:32,YYAEYRNIYAQTDESNLYWTYNSYTWAVLAYTWY\nHLA-B*55:33,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:34,YYAEYREISAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:35,YYAMYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:36,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:37,YYAEYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B*55:38,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:39,YYAEYRNIYAQTDESNLYWTYNLHTWAVLAYTWY\nHLA-B*55:40,YYAEYREIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:41,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:42,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:43,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*56:01,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:02,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B*56:03,YYAEYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*56:04,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B*56:05,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B*56:06,YYATYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B*56:07,YYAEYRNIYAQTDENTAYWTYNLYTWAVLAYLWY\nHLA-B*56:08,YYAEYREKYGQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:09,YYAEYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*56:10,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYTWY\nHLA-B*56:11,YYAEYRNIYAQTDESNLYIRYDFYTWAVLAYLWY\nHLA-B*56:12,YYAEYRNIYAQTDESNLYIRYNYYTWAVLAYTWY\nHLA-B*56:13,YYAEYRNIYAQTDESNLYWTYNLYTWAVDAYLWY\nHLA-B*56:14,YYAEYREKYRQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:15,YYAEYRNIYAQTDESNLYWTYNYYTWAVLAYLWY\nHLA-B*56:16,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:17,YYAEYRNIYANTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:18,YYAEYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B*56:20,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:21,YYAEYRNIYAQTDENIAYWTYNYYTWAELAYLWH\nHLA-B*56:22,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYLWY\nHLA-B*56:23,YYAEYRNIYANTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*56:24,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:25,YYAEYRNIYAQTDE
SNLYWTYNLYTWAELAYLWY\nHLA-B*56:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:27,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:29,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*57:01,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:02,YYAMYGENMASTYENIAYIVYNYYTWAVRAYLWY\nHLA-B*57:03,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B*57:04,YYAMYGENMASTYENIAYIVYDDYTWAVRAYLWY\nHLA-B*57:05,YYAMYGENMASTYENIAYIRYNYYTWAVRAYLWY\nHLA-B*57:06,YYAMYGENMASTYENIAYIVYDSYIWAVLAYLWY\nHLA-B*57:07,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLSY\nHLA-B*57:08,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:09,YYAMYGENMASTYENIAYIVYNYYTWAEDAYLWY\nHLA-B*57:10,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:11,YYAMYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*57:12,YYAMYGENMASTYESNLYIVYNYYTWAVRAYLWY\nHLA-B*57:13,YYAMYGENMASTYENIAYIVYDSYTWAERAYEWY\nHLA-B*57:14,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWH\nHLA-B*57:15,YYAMYGENVASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:16,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:17,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B*57:18,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:19,YYAMYGENMASTYENIAYIVYDSYTWAVRAYLWY\nHLA-B*57:20,YYAMYGKNMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:21,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:22,YYAMYGENMASTYENIAYIVYDSYTWAELAYLWY\nHLA-B*57:23,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:24,YYAMYGENMASTYENIAYIVYDSYTWAVDAYLWY\nHLA-B*57:25,YYAMYGENMASTYENIAYIVYDSYTWAVLAYEWY\nHLA-B*57:26,YYAMYGENMASTYENIAYIVYDSYTWAVLAYTWY\nHLA-B*57:27,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:29,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:30,YYAMYGENMASTYENIAYIVYDSYTWAARAYLWY\nHLA-B*57:31,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:32,YYAMYGENMASTYENIAYIVYHDYTWAVLAYLWY\nHLA-B*58:01,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:02,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*58:04,YYATYEENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:05,YYATYGENMASTYENIAYIRYDSYTLAALAYTWY\nHLA-B*58:06,YYATYGENMASTYENIAYLWYDSYTWAELAYLWY\nHLA-B*58:07,YYATYGENMASTYENIAYLWYDSYTWAVLAYLSY\nHLA-B*58:08,YYATYGENMASTYENIAYWTYNYYTWAELAYLWH\nHLA-B*58:09,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWH\nHLA-B*58:11,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:12,YYSTYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:13,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:14,YYATYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*58:15,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:16,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B*58:18,YYATYGENMASTYENIAYLSYDSYTWAVLAYLWY\nHLA-B*58:19,YYATYGENMASTYENIAYIRYDSYTWAELAYLWY\nHLA-B*58:20,YYATYGENMASTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*58:21,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:22,YYATYGENMASTYENIAYIRYDSYTWAVRAYLWY\nHLA-B*58:23,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:24,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:25,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*58:26,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B*58:27,YYATYGENMASTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*58:28,YYATYGENMASTYENIAYIRYNYYTWAVLAYLWY\nHLA-B*58:29,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:30,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*59:01,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B*59:02,YYAEYRNIFTNTYENIAYWTYNYYTWAVLAYTWY\nHLA-B*59:03,YYAEYRNIFTNTYENIAYWTYNFYTWAVLAYTWY\nHLA-B*59:04,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B*59:05,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B*67:01,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*67:02,YYSGYREKYRQADVSNLYLRYNFYTWAVLTYTWY\nHLA-B*73:01,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B*73:02,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B*78:01,YYATYRNIFTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B*78:02,YYATYRNIFTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B*78:03,YYATYRNICTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B*78:04,YYATYRNIFTNTYESNL
YWTYNYYTWAVLAYLWY\nHLA-B*78:05,YYATYREISTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B*78:06,YYATYREISTNTYENNLYWTYNYYTWAELAYLWH\nHLA-B*78:07,YYATYRNIFTNTDESNLYWTYNYYTWAELAYTWH\nHLA-B*81:01,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:02,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:03,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:05,YYSEYRNIFAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*82:01,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B*82:02,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B*82:03,YYSEYRNIYAQTDESNLYLRYNLYTWAVDAYLSY\nHLA-B*83:01,YYSEYRNIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-C*01:02,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:03,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*01:04,YFSGYREKYRQTDVSNLYLWCDSYTWAEWAYTWY\nHLA-C*01:05,YFSGYREKYRQTDVSNLYLRSDYYTWAERAYTWY\nHLA-C*01:06,YFSGYREKYRQTDVSNLYLWCDYYTWAVRAYTWY\nHLA-C*01:07,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:08,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:09,YFSGYREKYRQTDVSNLYLWCDYYTWAEWAYTWY\nHLA-C*01:10,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYEWY\nHLA-C*01:11,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:12,YFSGYREKYRQTDVSNLYLWYDYYTWAERAYTWY\nHLA-C*01:13,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:14,YFSGYREKYRQTDVNKLYLWCDYYTWAERAYTWY\nHLA-C*01:15,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:16,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:17,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:18,YFSGYREKYHQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:19,YFSGYREKYRQTDVCNLYLWCDYYTWAERAYTWY\nHLA-C*01:20,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:21,YFSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*01:22,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYTWY\nHLA-C*01:23,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:24,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*01:25,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:26,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:27,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:28,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:29,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTCY\nHLA-C*01:30,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWH\nHLA-C*01:31,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYMWY\nHLA-C*01:32,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:33,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:34,YFSGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C*01:35,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYLWY\nHLA-C*01:36,YFSGYREKYRQTDVSNLYLRFDYYTWAERAYTWY\nHLA-C*01:38,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:39,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:40,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*02:02,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:03,YYAGYREKYRQTDVNKLYLRYDSYTWAVLAYEWY\nHLA-C*02:04,CYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYEWY\nHLA-C*02:06,YYAGYREKYRQTDVNKLYLRYDLYTWAEWAYEWY\nHLA-C*02:07,YYAGYREKYRQTDVNKLYLRYHDYTWAEWAYEWY\nHLA-C*02:08,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:09,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:10,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:11,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:12,YYAGYREKYRQADVSKLYLRYDSYTWAEWAYEWY\nHLA-C*02:13,YYAGYREKYRQTDVNKLYLRYDSYTWAAWAYEWY\nHLA-C*02:14,YDAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:15,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:16,YYAGYREKYRQTDVNKLYLRYDSYTWAELAYEWY\nHLA-C*02:17,YYAGYREKYRQTDVNKLYLWFDSYTWAEWAYEWY\nHLA-C*02:18,YYAGYREKYRQTDVNKLYLRYDSYTWAALAYEWY\nHLA-C*02:19,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C*02:20,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:22,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYEWY\nHLA-C*02:23,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C*02:24,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:26,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:27,YYAGYREKYRQTDVSNLYLR
YDSYTWAEWAYEWY\nHLA-C*02:28,YYAGYREKYRQTDVNKLYLRYDSYTWAVWAYEWY\nHLA-C*02:29,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:30,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:31,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:32,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYEWY\nHLA-C*02:33,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:34,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:35,YYAGYREKYRQTDVNKLHLRYDSYTWAEWAYEWY\nHLA-C*02:36,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:37,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:39,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:40,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*03:01,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:02,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:03,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:04,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:05,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:06,YYAGYREKYRQTDVSNLYIRYVYYTWAELAYLWY\nHLA-C*03:07,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C*03:08,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:09,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:10,YYAGYREKYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C*03:11,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:12,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C*03:13,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C*03:14,YYAGYREKYRQTDVSNLYIRYDSYTLAALAYTWY\nHLA-C*03:15,YYAGYREKYRQADVNKLYLRYDSYTWAELAYLWY\nHLA-C*03:16,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYTWY\nHLA-C*03:17,YYAGYREKYRQTDVSNLYLWYDYYTWAELAYLWY\nHLA-C*03:18,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:19,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C*03:21,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C*03:23,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:24,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:25,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:26,YYAGYREKYRQTDVSNLYIRYDFYTWAELAYLWY\nHLA-C*03:27,YYAGYREKYRQADVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:28,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C*03:29,YYAGYRENYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C*03:30,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:31,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:32,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:33,YYAGYREKYRQTDVSNLCLRYDSYTWAELAYLWY\nHLA-C*03:34,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYEWY\nHLA-C*03:35,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C*03:36,YYAGYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-C*03:37,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:38,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:39,YDSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:40,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:41,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:42,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:43,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:44,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:45,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C*03:46,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:47,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:48,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:49,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C*03:50,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:51,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWH\nHLA-C*03:52,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:53,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:54,YYAGYREKYRQTDVSNLYIRYDYYTWAELPYLWY\nHLA-C*03:55,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C*03:56,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:57,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:58,YYAGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*03:59,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:60,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:61,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:62,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:63,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:64,YYAGYREKYRQTDVSNLYIRYDY
YTWAELAYLWY\nHLA-C*03:65,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:66,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:67,YDAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:68,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:69,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:70,YYAGYREKYRQTDESNLYIRYDYYTWAELAYLWY\nHLA-C*03:71,YYAGYREKYRQTDVSNLYLWYDSYTWAELAYLWY\nHLA-C*03:72,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:73,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:74,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:75,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:76,HYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:77,YYAGYREKYRQTDVSNLYIRYDYYTWAVLAYLWY\nHLA-C*03:78,YYAGYREKYRQTDVSNLYIRYDYYTWAEMAYLWY\nHLA-C*03:79,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:80,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C*03:81,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:82,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:83,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:84,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:85,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:86,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*03:87,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:88,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:89,YYAGYREKYRQTDVSNLYLRFDSYTWAELAYLWY\nHLA-C*03:90,YYAGYREKYRQTDVSNLYIRSDYYTWAELAYLWY\nHLA-C*03:91,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:92,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C*03:93,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:94,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*04:01,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:03,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:04,YSAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C*04:05,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:06,YYAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C*04:07,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:08,YSAGYREKYRQADVNKLYLRFNFYTWAERAYLWY\nHLA-C*04:10,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:11,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:12,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:13,YSAGYREKYRQADVNKLYLRFNFYTWAALAYTWY\nHLA-C*04:14,YSAGYREKYRQADVNKLYLRFNFYTWAEQAYTWY\nHLA-C*04:15,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*04:16,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:17,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*04:18,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:19,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:20,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:23,YSAGYREKYRQADVNKLYLRFDFYTWAERAYTWY\nHLA-C*04:24,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:25,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:26,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:27,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:28,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:29,YSAGYREKYRQADVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:30,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:31,YSAGYREKYRQADVNKLYLRFNFYTWVERAYTWY\nHLA-C*04:32,YSAGYREKYRQADVNKLYLRFNFYTWAERAYEWY\nHLA-C*04:33,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:34,YSAGYREKYRQADVNKLYLRFNFYTWAVLAYLWY\nHLA-C*04:35,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:36,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:37,YSAGYREKYRQADVNKLYLWCNFYTWAERAYTWY\nHLA-C*04:38,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:39,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:40,YSAGYREKYRQADVNKLYFRFNFYTWAERAYTWY\nHLA-C*04:41,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:42,YDAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:43,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:44,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:45,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:46,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:47,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:48,YSAGYREKYRQADVNKLYLRFNFYTW
AERPYTWY\nHLA-C*04:49,YSAGYWEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:50,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:51,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:52,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:53,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:54,YSAGYREKYRQADVNKLYLRFDSYTWAERAYTWY\nHLA-C*04:55,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:56,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:57,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:58,YSAGYREKYRQADVNKLYLRFNFYTLAALAYTWY\nHLA-C*04:60,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:61,YSAGYREKYRQADVNKLYLRFNFYTWAARAYTWY\nHLA-C*04:62,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:63,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:64,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:65,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:66,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:67,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:68,YSAGYREKYRQADVNKLYLRFNFYTWAAQAYTWY\nHLA-C*04:69,YSAGYGEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:70,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*05:01,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:03,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:04,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYTWY\nHLA-C*05:05,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:06,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:08,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C*05:09,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:10,YYAGYREKYRQTDVNKLYIRYNFYTWAERAYTWY\nHLA-C*05:11,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYTWY\nHLA-C*05:12,YYAGYREKYRQTDVNKLYLRYNFYTWAVRAYTWY\nHLA-C*05:13,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:14,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:15,YYAGYWEKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:16,YYAGYREKYRQTDVNKLYLWYNFYTWAERAYTWY\nHLA-C*05:17,YYAGYREKYRQTDVNKLYLRYNFYTWAALAYTWY\nHLA-C*05:18,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:19,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:20,YYAGYREKYRQTDVNNLYLRYNFYTWAERAYTWY\nHLA-C*05:21,YYAGYREKYRQTDVNKLHLRYNFYTWAERAYTWY\nHLA-C*05:22,YYAGYREKYRQTDVNKLYLRYDFYTWAERAYTWY\nHLA-C*05:23,YYAGYREKYRQTDVNKLYLRYNFYTLAERAYTWY\nHLA-C*05:24,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:25,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:26,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:27,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYLWY\nHLA-C*05:28,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:29,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:30,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:31,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:32,YYAGYREKYRQTDVNRLYLRYNFYTWAERAYTWY\nHLA-C*05:33,YYAGCREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:34,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:35,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:36,YYAGYRENYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:37,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:38,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:39,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYLWY\nHLA-C*05:40,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:41,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:42,YYAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:43,YDAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:44,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:45,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*06:02,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:03,YYSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:04,YDSGYREKYRQADVNKLYLWYDSYTWAELAYTWY\nHLA-C*06:05,YDSGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C*06:06,YDSGYREKYRQADVNKLYLWYDSYTWAERAYTWY\nHLA-C*06:07,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:08,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYEWY\nHLA-C*06:09,YDSGYREKYRQADVNKLYLWYNFYTWAEWAYTWY\nHLA-C*06:10,YDPGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:11,YDSGYREKYRQADVSNLYLWYDSYTWAEW
AYTWY\nHLA-C*06:12,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:13,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:14,YDSGYREKYRQADVNKLYIWYDSYTWAEWAYTWY\nHLA-C*06:15,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:17,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:18,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:19,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:20,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:21,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:22,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:23,YDSGYREKYRQADVNKLYLWCDSYTWAEWAYTWY\nHLA-C*06:24,YDSGYREKYRQADVNKLYLWYDSYTWAEWAHTWY\nHLA-C*06:25,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:26,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:27,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:28,YDAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:29,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:30,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:31,YDSGYREKYRQADVNKLYLWYDSYTWAAWAYTWY\nHLA-C*06:32,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:33,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:34,YDSGYREKYRQADVNKLYLWYDFYTWAEWAYTWY\nHLA-C*06:35,YDSGYREKYRQADVNKLYIRSDSYTWAEWAYTWY\nHLA-C*06:36,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:37,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:38,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:39,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:40,YDSGYREKYRQADVNKLYLWYDSYTWAEWTYTWY\nHLA-C*06:41,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:42,YDSGYREKYRQADVNKLYLWYDSYTRAEWAYTWY\nHLA-C*06:43,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:44,YDSGYRENYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:45,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*07:01,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:02,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:03,YDSGYREKYRQADVSNLYLRSDSYTWAALAYLWY\nHLA-C*07:04,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:05,YDSGYREKYRQADVSNLYLNYDSYTLAALAYTWY\nHLA-C*07:06,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:07,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C*07:08,YDSGYREKYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C*07:09,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C*07:10,YDSGYREKYRQADVSNLYIRSDSYTLAALAYTWY\nHLA-C*07:11,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:12,YDSGYREKYRQADVSNLYFRYDFYTWAADAYTWY\nHLA-C*07:13,YDSGYREKYRQADVSNLYLRSDFYTLAALAYTWY\nHLA-C*07:14,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:15,YDSGYREKYRQADVSNLYLRSDSYTLAALAYEWY\nHLA-C*07:16,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:17,YDSGYREKYRQADVSNLYLRSDSYTWAALAYTWY\nHLA-C*07:18,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:19,YDSGYRENYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:20,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:21,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:22,YDSGYRENYRQADVSNLYLRYDSYTLAAWAYTWY\nHLA-C*07:23,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:24,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:25,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:26,YYSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:27,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:28,YDSGYRENYRQADVSNLYLRYNFYTLAALAYTWY\nHLA-C*07:29,YDSGYREKYRQADVSNLYLRSDYYTLAALAYTWY\nHLA-C*07:30,YDSGYRENYRQADVSNLYLRYDSYTLAGLAYTWY\nHLA-C*07:31,YDSGYREKYRQADVSNLYLWYDSYTLAALAYTWY\nHLA-C*07:35,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:36,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:37,YDSGYREKYRQADVSNLYLRSDSYTLAARAYTWY\nHLA-C*07:38,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:39,YDSGYREKYRQTDVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:40,YDSGYRENYRQTDVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:41,YDSGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*07:42,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:43,YDSGYREKYRQADVSNLYIRYDSYTLAALAYTWY\nHLA-C*07:44,YDSGYRENYRQADVSNLYLRYDSYTLAALAYT
WY\nHLA-C*07:45,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:46,YDSEYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:47,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:48,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:49,YDSGYREKYRQADVNNLYLRSDSYTLAALAYTWY\nHLA-C*07:50,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:51,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:52,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:53,YDSGYRENYRQADVSNLYLRYDSYTLAAQAYTWY\nHLA-C*07:54,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:56,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:57,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:58,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:59,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:60,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:62,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:63,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:64,YSAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:65,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:66,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:67,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:68,YDSGYREKYRQADVSNLYLRSDSYTLAADAYTWY\nHLA-C*07:69,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:70,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:71,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:72,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:73,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:74,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:75,YDSGYREKYRQADVSNLHLRSDSYTLAALAYTWY\nHLA-C*07:76,YDSGYREKYRQADVNKLYLRSDSYTLAALAYTWY\nHLA-C*07:77,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:78,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:79,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:80,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:81,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:82,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:83,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:84,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:85,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:86,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:87,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:88,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:89,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:90,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:91,YDSGYRENYRQADVSNLYLRYDSYTLTALAYTWY\nHLA-C*07:92,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:93,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:94,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:95,YDSGYRENYRQADVSNLYLRYDSYTLAVLAYTWY\nHLA-C*07:96,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:97,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:99,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:100,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:101,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:102,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:103,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:105,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:106,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:107,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:108,YDSGYRENYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C*07:109,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:110,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:111,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:112,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:113,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:114,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:115,YDSGYRENYRQADVSDLYLRYDSYTLAALAYTWY\nHLA-C*07:116,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:117,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:118,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:119,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:120,YDSGYRENYRQADVSNLYLRYDSYTLAALAYPWY\nHLA-C*07:122,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:123,YDSGYREKYRQAD
VSNLYLRSDSYTLAALAYTWY\nHLA-C*07:124,YDSGYRENYRQADESNLYLRYDSYTLAALAYTWY\nHLA-C*07:125,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:126,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:127,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:128,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:129,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:130,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:131,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:132,DDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:133,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:134,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:135,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:136,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:137,YDSGYREKYRQADVSNLYLRSDSYTLAALTYTWY\nHLA-C*07:138,YDSGYREKYRQADVSNLYLRSDSYTLAAWAYTWY\nHLA-C*07:139,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:140,YDSGYRENYRQADVSNLYLRYDSYTWAVDAYTWY\nHLA-C*07:141,YDSGYRENYRQADVSNLYLRYDSYTWAALAYTWY\nHLA-C*07:142,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:143,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:144,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:145,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:146,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:147,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:148,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:149,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*08:01,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:02,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:03,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:04,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C*08:05,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:06,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYAWY\nHLA-C*08:07,YYAGYREKYRQTDVSNLYLRYNFYTLAERAYTWY\nHLA-C*08:08,YYAGYREKYRQTDVSNLYLSYNFYTWATLAYTWY\nHLA-C*08:09,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C*08:10,YYAGYREKYRQTDVNKLYLRYNFYTWATLAYTWY\nHLA-C*08:11,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C*08:12,YYAGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C*08:13,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C*08:14,YYSGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:15,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:16,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:17,YYAGYREKYCQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:18,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:19,YYAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*08:20,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:21,YYAGYREKYRQADVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:22,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:23,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:24,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:25,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:27,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C*08:28,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:29,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C*08:30,YYAGYREKYRQTDVSNLYLRYNFYTWPERAYTWY\nHLA-C*08:31,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYEWY\nHLA-C*08:32,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:33,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:34,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:35,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*12:02,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:03,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:04,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:06,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:07,YYAGYREKYRQADVGNLYLWYDSYTWAEWAYTWY\nHLA-C*12:08,YYAGYRENYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:09,YYAGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C*12:10,YYAGYREKYRQADVSNLYLRFDSYTWAEWAYTWY\nHLA-C*12:11,YYAGYREKYRQADVSNLYLWSDSYTWAEWAYTWY\nHLA-C*12:12,YYAGYREKYRQADESNLYLWYDSYTWAEWAYTWY\nHLA-C*12:13,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:14,YYAGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHL
A-C*12:15,YYAGYREKYRQADVSNLYLWYDLYTWAEWAYTWY\nHLA-C*12:16,YDSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:17,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:18,YYAGYREKYRQADVSNLYLRYDSYTWAELAYTWY\nHLA-C*12:19,YYAGYREKYRQADVSNLYLWYDSYTWAECAYTWY\nHLA-C*12:20,YYAGYREKYRQADVSNLYLWYDSYTWAELAYTWY\nHLA-C*12:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYTWY\nHLA-C*12:22,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:23,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:24,YYAGYREKYRQADVSNLYLWYDSYTWAERAYTWY\nHLA-C*12:25,YYAGYPEKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:26,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:27,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:28,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:29,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:30,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:31,YYAGYREKYRQADVSNLYLWYNFYTWAEWAYTWY\nHLA-C*12:32,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:33,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:34,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:35,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:36,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:37,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:38,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:40,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:41,YYAGYREKYRQADVNKLYLRYDSYTWAEWAYTWY\nHLA-C*12:43,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:44,YYAGYREKYRQADVSNLYIRYDSYTWAEWAYTWY\nHLA-C*14:02,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:03,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:04,YSAGYREKYRQADVNNLYLWFDSYTWAERAYTWY\nHLA-C*14:05,YSAGYREKYRQTDVSNLYLWYDSYTWAERAYTWY\nHLA-C*14:06,YSAGYREKYRQTDVSNLYLWFDSYTWAELAYTWY\nHLA-C*14:08,YSAGYREKYRQTDVSNLYPWFDSYTWAERAYTWY\nHLA-C*14:09,YSAGYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-C*14:10,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:11,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:12,YSAGYREKYRQTDVNKLYLWFDSYTWAERAYTWY\nHLA-C*14:13,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:14,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:15,YSAGYREKYRQTDVSNLYLWFDSYTWAALAYTWY\nHLA-C*14:16,YSAGYREKYRQTDVSNLYLWFDSYTWAEWAYTWY\nHLA-C*14:17,YSAGYREKYRQTDVSNLYLWFDSYTLAARAYTWY\nHLA-C*14:18,YSSGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:19,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:20,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYLWY\nHLA-C*15:02,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:03,YYAGYRENYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:04,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:05,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:06,YYAGYRENYRQTDVNKLYIRYDYYTWAELAYTWY\nHLA-C*15:07,YYAGYRENYRQTDVSNLYIRYDLYTWAELAYTWY\nHLA-C*15:08,YYAGYRENYRQTDVNKLYIRYDLYTWAERAYTWY\nHLA-C*15:09,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:10,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:11,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:12,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:13,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:15,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYLWY\nHLA-C*15:16,YYAGYREKYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:17,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:18,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:19,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:20,YYAGYREKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:21,YYAGYRENYRQTDVSKLYIRYDLYTWAELAYTWY\nHLA-C*15:22,YYAGYRENYRQTDVNKLYLRYDFYTWAELAYTWY\nHLA-C*15:23,YDAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:24,YYAGYRENYRQTDVNKLYIRYNYYTWAELAYTWY\nHLA-C*15:25,YYAGYREKYRQADVSNLYIRYNFYTWAEDAYTSY\nHLA-C*15:26,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:27,YYAGYRNKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:28,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:29,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:30,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C
*15:31,YYAGYRENYRQTDVNKLYIRYDLYTWAALAYTWY\nHLA-C*15:33,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:34,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:35,YYAGYRENYRQTDVNKLHIRYDLYTWAELAYTWY\nHLA-C*16:01,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:02,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:04,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C*16:06,YYAGYREKYRQTDVSNLYLRSDSYTWAAQAYTWY\nHLA-C*16:07,YYAGYREKYRQTDVSNLYLRYDSYTWAAQAYTWY\nHLA-C*16:08,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:09,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:10,YYAGYREKYRQTDVSNLYLWYDDYTWAAQAYTWY\nHLA-C*16:11,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:12,YYAGYGEKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:13,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:14,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:15,YYAGYREKYRQADVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:17,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:18,YYAGYREKYRQTDVSNLYLWCDSYTWAAQAYTWY\nHLA-C*16:19,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:20,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:21,YDAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:22,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:23,YYAGYREKYRQTDVSNLYLWFDSYTWAAQAYTWY\nHLA-C*16:24,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:25,YYAGYREKYRQADVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:26,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*17:01,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:02,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:03,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:04,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:05,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:06,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:07,YYAGYREKYRQADVNKLYIRYNFYSLAELAYLWY\nHLA-C*18:01,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*18:02,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*18:03,YDSGYREKYRQADVNKLYLRFNFYTWAEWAYEWY\n"
  },
  {
    "path": "downloads-generation/models_class1_unselected/generate_hyperparameters.py",
    "content": "\"\"\"\nGenerate grid of hyperparameters\n\"\"\"\n\nfrom sys import stdout\nfrom copy import deepcopy\nfrom yaml import dump\n\nbase_hyperparameters = {\n    ##########################################\n    # ENSEMBLE SIZE\n    ##########################################\n    \"n_models\": 4,\n\n    ##########################################\n    # OPTIMIZATION\n    ##########################################\n    \"max_epochs\": 500,\n    \"patience\": 20,\n    \"early_stopping\": True,\n    \"validation_split\": 0.1,\n    \"minibatch_size\": None,\n    \"loss\": \"custom:mse_with_inequalities\",\n\n    ##########################################\n    # RANDOM NEGATIVE PEPTIDES\n    ##########################################\n    \"random_negative_rate\": 0.0,\n    \"random_negative_constant\": 25,\n    \"random_negative_affinity_min\": 20000.0,\n    \"random_negative_affinity_max\": 50000.0,\n\n    ##########################################\n    # PEPTIDE REPRESENTATION\n    ##########################################\n    # One of \"one-hot\", \"embedding\", or \"BLOSUM62\".\n    \"peptide_amino_acid_encoding\": \"BLOSUM62\",\n    \"use_embedding\": False,  # maintained for backward compatability\n    \"embedding_output_dim\": 8,  # only used if using embedding\n    \"kmer_size\": 15,\n\n    ##########################################\n    # NEURAL NETWORK ARCHITECTURE\n    ##########################################\n    \"locally_connected_layers\": [\n        {\n            \"filters\": 8,\n            \"activation\": \"tanh\",\n            \"kernel_size\": 3\n        }\n    ],\n    \"activation\": \"tanh\",\n    \"output_activation\": \"sigmoid\",\n    \"layer_sizes\": [16],\n    \"dense_layer_l1_regularization\": None,\n    \"batch_normalization\": False,\n    \"dropout_probability\": 0.0,\n\n    ##########################################\n    # TRAINING Data\n    ##########################################\n    \"train_data\": {\"subset\": \"all\", \"pretrain_min_points\": 1000},\n}\n\ngrid = []\nfor train_subset in [\"all\", \"quantitative\"]:\n    for minibatch_size in [128]:\n        for dense_layer_size in [8, 16, 32, 64]:\n            for l1 in [0.0, 0.001]:\n                for num_lc in [0, 1, 2]:\n                    for lc_kernel_size in [3, 5]:\n                        new = deepcopy(base_hyperparameters)\n                        new[\"minibatch_size\"] = minibatch_size\n                        new[\"train_data\"][\"subset\"] = train_subset\n                        new[\"layer_sizes\"] = [dense_layer_size]\n                        new[\"dense_layer_l1_regularization\"] = l1\n                        (lc_layer,) = new[\"locally_connected_layers\"]\n                        lc_layer['kernel_size'] = lc_kernel_size\n                        if num_lc == 0:\n                            new[\"locally_connected_layers\"] = []\n                        elif num_lc == 1:\n                            new[\"locally_connected_layers\"] = [lc_layer]\n                        elif num_lc == 2:\n                            new[\"locally_connected_layers\"] = [lc_layer, deepcopy(lc_layer)]\n                        if not grid or new not in grid:\n                            grid.append(new)\n\ndump(grid, stdout)\n"
  },
  {
    "path": "downloads-generation/models_class1_unselected_with_mass_spec/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Train standard MHCflurry Class I models.\n# Calls mhcflurry-class1-train-allele-specific-models on curated training data\n# using the hyperparameters in \"hyperparameters.yaml\".\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=models_class1_unselected_with_mass_spec\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\n\nmkdir models\n\ncp $SCRIPT_DIR/class1_pseudosequences.csv .\n\npython $SCRIPT_DIR/generate_hyperparameters.py > hyperparameters.yaml\n\nGPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0\necho \"Detected GPUS: $GPUS\"\n\nPROCESSORS=$(getconf _NPROCESSORS_ONLN)\necho \"Detected processors: $PROCESSORS\"\n\ntime mhcflurry-class1-train-allele-specific-models \\\n    --data \"$(mhcflurry-downloads path data_curated)/curated_training_data.with_mass_spec.csv.bz2\" \\\n    --allele-sequences class1_pseudosequences.csv \\\n    --hyperparameters hyperparameters.yaml \\\n    --out-models-dir models \\\n    --held-out-fraction-reciprocal 10 \\\n    --min-measurements-per-allele 25 \\\n    --num-jobs $(expr $PROCESSORS \\* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50\n\ncp $SCRIPT_ABSOLUTE_PATH .\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
  {
    "path": "downloads-generation/models_class1_unselected_with_mass_spec/README.md",
    "content": "# Class I allele-specific models (ensemble)\n\nThis download contains trained MHC Class I MHCflurry models.\n\nTo generate this download run:\n\n```\n./GENERATE.sh\n```"
  },
  {
    "path": "downloads-generation/models_class1_unselected_with_mass_spec/class1_pseudosequences.csv",
    "content": "allele,pseudosequence\nHLA-A*01:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:02,YSAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:03,YFAMYQENMAHTDANTLYIMYRDYTWVARVYRGY\nHLA-A*01:06,YFAMYQENMAHTDANTLYIIYRDYTWVALAYRGY\nHLA-A*01:07,YFAMYQENVAHTDENTLYIIYRDYTWVARVYRGY\nHLA-A*01:08,YFAMYQENMAHTDANTLYIIYRDYTWVARVYWGY\nHLA-A*01:09,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:10,YFAMYQENMAHTDANTLYIIYRDYTWARRVYRGY\nHLA-A*01:12,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A*01:13,YFAMYQENMAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*01:14,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTGY\nHLA-A*01:17,YFAMYQENMAQTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:19,YFAMYQENMAHTDANTLYIIYRDYTWAVQAYTGY\nHLA-A*01:20,YSAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A*01:21,YFAMYQENMAHTDANTLYIIYRDYTWAVRVYRGY\nHLA-A*01:23,YFAMYQENVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:24,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:25,YFAMYQENMAHTDANTLYIIYRDYTWVAQVYRGY\nHLA-A*01:26,YFAMYQENMAHTDANTLYIIYRDYTWAARVYRGY\nHLA-A*01:28,YFAMYQENMAHTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*01:29,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:30,YFAMYQENMAHTDANTLYIIYHYYTWVARVYRGY\nHLA-A*01:32,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:33,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:35,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:36,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:37,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:38,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:39,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:40,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:41,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:42,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:43,YYAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:44,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:45,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:46,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:47,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:48,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:49,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:50,YFAMYQENMAHTDANTLYIIYREYTWVARVYRGY\nHLA-A*01:51,YFAMYRNNVAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:54,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:55,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:58,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:59,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:60,YFAMYPENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:61,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:62,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:63,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:64,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRGY\nHLA-A*01:65,YFAMYQENMAHTDANTLYIIYRDYTWVARVCRGY\nHLA-A*01:66,YFAMYQENMAHTDANTLYVRYRDYTWVARVYRGY\nHLA-A*02:01,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:02,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:03,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:04,YFAMYGEKVAHTHVDTLYVMYHYYTWAVLAYTWY\nHLA-A*02:05,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:06,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:07,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:08,YYAMYGENVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:09,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:10,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:11,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:12,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:13,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYTWY\nHLA-A*02:14,YYAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A*02:16,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A*02:17,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:18,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:19,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:20,YFAMYGENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:21,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:22,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A*02:24,
YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:25,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:26,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:27,YFAMYGEKVAHTHVDTLYVRYHYYTWAAQAYTWY\nHLA-A*02:28,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:29,YFAMYGEQVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:30,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:31,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:33,YFAMYGEKVAHTHVDTLYVRSHYYTWAVLAYTWY\nHLA-A*02:34,YFAMYGEKVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:35,YFAMYGEKVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:36,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTGY\nHLA-A*02:37,YFAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:38,YFAMYGEKVAHTHVDTLYVRYHYYTWAEQAYRWY\nHLA-A*02:39,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:40,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:41,YYAMYGEKVAHTHVDTLYVRYQYYTWAVLAYTWY\nHLA-A*02:42,YFSMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:44,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:45,YFAMYQEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:46,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:47,YFAMYGEKVAHSHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:48,YFAMYEEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:49,YFAMYGEKVAHTHVDTLYVRYHYYTWAVRAYTWY\nHLA-A*02:50,YFAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*02:51,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:52,YFAMYGEKVAHTHVDTLYVRYEHYTWAVLAYTWY\nHLA-A*02:54,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTGY\nHLA-A*02:55,YFAMYRNNVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:56,YFAMYQENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:57,YYAMYGEKVAHTHVDTLYLMYHYYTWAVLAYTWY\nHLA-A*02:58,YFAMYGEKVAHTHVDTLYLRYHYYTWAVLAYTWY\nHLA-A*02:59,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:60,YFAMYGEKVAHTHVDTLYVRYHFYTWAVLAYTWY\nHLA-A*02:61,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:62,YFAMYGENVAQTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:63,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:64,YFAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A*02:65,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*02:66,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:67,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:68,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:69,YFAMYGEKVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:70,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:71,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:72,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:73,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A*02:74,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:75,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:76,YSAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:77,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:78,YYAMYQENVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:79,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:80,YFAMYGEKVAHTHVDTLYVRYQDYTWAVLAYTWY\nHLA-A*02:81,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A*02:84,YYAMYGEKVAHTHVDTLYFRYHYYTWAVLAYTWY\nHLA-A*02:85,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:86,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:87,YFAMYGEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A*02:89,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:90,YFAMYGEKVAHTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:91,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:92,YFAMYEEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:93,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:95,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:96,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:97,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:99,YYAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:101,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYRWY\nHLA-A*02:102,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:103,YFAMYQENVAQTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:104,YFAMYGEKVAHTHVDTLYVRYHYYTWAVWAYTWY\nHLA-A*02:105,YFAMYGEKVAHTHVDTLYVRYEYYTWAVLAYTWY\nHLA-A*02:106,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:
107,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:108,YYAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:109,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:110,YFAMYGEKVAHTHVDTLYLMFHYYTWAVLAYTWY\nHLA-A*02:111,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:112,YFAMYGEKVAHTDENIAYVRCHYYTWAVLAYTWY\nHLA-A*02:114,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A*02:115,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:116,YFAMYGEKVAHTHLDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:117,YFAMYGEKVAHTHVDTLYVRYQDYTWAEWAYTWY\nHLA-A*02:118,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:119,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:120,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:121,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:122,YYAMYGEKVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*02:123,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:124,YFAMYGEKVAHTDESIAYVRYHYYTWAVLAYTWY\nHLA-A*02:126,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:127,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A*02:128,YFAMYGENVAHIDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:129,YYAMYEEKVAHTDENIAYVRYHYYTWAVLAYTWY\nHLA-A*02:130,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:131,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYEWY\nHLA-A*02:132,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:133,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:134,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:135,YFAMYGEKVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*02:136,YFAMYGEKVAHTDENIAYVRYHYYTWAVWAYTWY\nHLA-A*02:137,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:138,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:139,YFAMYGEKVTHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:140,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:141,YFVMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:142,YYAMYGEKVAHTHVDTLYVRYHYYTWAVQAYTWY\nHLA-A*02:143,YYAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:144,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:145,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:146,YFAMYGEKVAHTDANTLYVRYHYYTWAVLAYTWY\nHLA-A*02:147,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A*02:148,YFAMYGEKVAHTHVDTLYVRFHYYTWAEWAYTWY\nHLA-A*02:149,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:150,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:151,YFAMYGEKVAHTHVDTLYVRYDYYTWAVLAYTWY\nHLA-A*02:152,YFAMYGEKVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*02:153,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:154,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYRWY\nHLA-A*02:155,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:156,YFAMYGEKVAHTHVDTLYIIYHYYTWAVLAYTWY\nHLA-A*02:157,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:158,YFAMYGEKVAHAHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:159,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:160,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:161,YFAVYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:162,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:163,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:164,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:165,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:166,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:167,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYKWY\nHLA-A*02:168,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:169,YYAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:170,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:171,YFAMYGEKVAHTHVDTLYVRYHYYTWAELAYTWY\nHLA-A*02:172,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:173,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:174,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:175,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:176,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:177,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:178,YYAMYGEKVAHTHVDTLYVRYHSYTWAVLAYTWY\nHLA-A*02:179,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:180,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:181,YFAMYGEKVAHTHVDTLYVRYHY
YTWAVLAYTWY\nHLA-A*02:182,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:183,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:184,YFAMYGEKVAHTHEDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:185,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:186,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:187,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:188,YFAMYGEKVAHTHVDTLYVRYDSYTWAVLAYTWY\nHLA-A*02:189,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:190,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:191,YFAMYGEKVAHTHVDTLYVRCHYYTWAVWAYTWY\nHLA-A*02:192,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:193,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:194,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:195,YFAMYQENVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:196,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:197,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:198,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:199,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:200,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:201,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:202,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:203,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:204,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:205,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:206,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:207,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:208,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:209,YFAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:210,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:211,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:212,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:213,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:214,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:215,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:216,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:217,YFAMYREKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:218,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:219,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:220,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:221,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:224,YFAMYGEKVAHTHVDTLYVGYHYYTWAVLAYTWY\nHLA-A*02:228,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:229,YYAMYGEKVAHTHVDTLYLRYRYYTWAVWAYTWY\nHLA-A*02:230,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:231,YFAMYGEKVAHTHVDTLYVRNHYYTWAVLAYTWY\nHLA-A*02:232,YYAMYGEKVAHTHVDTLYLRYHYYTWAVWAYTWY\nHLA-A*02:233,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTRY\nHLA-A*02:234,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:235,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:236,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:237,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:238,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:239,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:240,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:241,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:242,YFAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:243,YTAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:244,YYAMYGEKVAHTHVDTLYVRFHYYTWAVLAYTWY\nHLA-A*02:245,YFAMYGEKVAHTHVDTLYIRYHYYTWAVLAYTWY\nHLA-A*02:246,YFAMYGEKVAHTHVDTLYVRYRDYTWAVLAYTWY\nHLA-A*02:247,YFAMYGEKVAHTDENTLYVRYHYYTWAVLAYTWY\nHLA-A*02:248,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:249,YFAMYVEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:251,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:252,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:253,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:254,YFAMYGEKVAHTHVDTLYVRYNFYTWAVLAYTWY\nHLA-A*02:255,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTGY\nHLA-A*02:256,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:257,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:258,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:259,YYAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:260,Y
FAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*02:261,YFAMYGEKVAHTHMDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:262,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLVYTWY\nHLA-A*02:263,YFAMYGEKVAHTHVDTLYVRYHYYTWSVLAYTWY\nHLA-A*02:264,YFAMYGEKVAHTHVDTLYVRYHYYTWAEWAYTWY\nHLA-A*02:265,YFAMYGEKVAHTHVDTLYVRCHYYTWAVLAYTWY\nHLA-A*02:266,YFAMYGEKVAHTHVDTLYVRYHYYTWAVLAYTWY\nHLA-A*03:01,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:02,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:04,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:05,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:06,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:07,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*03:08,YFAMYQENVAHTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:09,YFAMYQENVAQTHVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:10,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:12,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:13,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:14,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:15,YFAMYQENVAQTDVDTLYIIFRDYTWAELAYTWY\nHLA-A*03:16,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:17,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:18,YFAMYQENVAQTDVDTLYIIYRDYTWVARVYRGY\nHLA-A*03:19,YFAMYQENVAQTDVDTLYIIFHYYTWAELAYTWY\nHLA-A*03:20,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:22,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:23,YFAMYGEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:24,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:25,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:26,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:27,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:28,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:29,YFAMYQENVVQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:30,YFAMYEEKVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:31,YFAMYQENVAQTDVDTLYIIYRYYTWAVQAYTWY\nHLA-A*03:32,YFAMYQENVAHIDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:33,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:34,YFAMYQENVAPTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:35,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:37,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:38,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:39,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:40,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:41,YFAMYQENVAHTDANTLYIIYRDYTWAELAYTWY\nHLA-A*03:42,YFAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*03:43,YFAMYQENVAQTDVDTLYIIYEHYTWAELAYTWY\nHLA-A*03:44,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:45,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:46,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:47,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:48,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:49,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:50,YFAMYQENVAQTDVDTLYIIYRDYTWAEWAYTWY\nHLA-A*03:51,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:52,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:53,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:54,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:55,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:56,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:57,YFAMYQENVAQTDANTLYIIYRDYTWAELAYTWY\nHLA-A*03:58,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:59,CFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:60,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:61,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:62,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:63,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:64,YFAMYQENVAQTDVDTLYIIYRDYTWADLAYTWY\nHLA-A*03:65,YFAMYQENVAQTDVDTLYIIYRDYTWAEQAYTWY\nHLA-A*03:66,YFAMYQENVAQTDVDTLYIIYRDYTWAERAYTWY\nHLA-A*03:67,YFATYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:70,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:71,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:72,YSAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:73,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:7
4,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:75,YFAMYQENVAQTDVDTLYLMYRDYTWAELAYTWY\nHLA-A*03:76,YFAMYQENVAQTDVDTLYIIYRDYTWAVQAYTWY\nHLA-A*03:77,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:78,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:79,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:80,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:81,YFAMYQENVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*03:82,YFAMYQENVAQTDVDTLYIIYEHYTWAVQAYTWY\nHLA-A*11:01,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:02,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:03,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A*11:04,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTWY\nHLA-A*11:05,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:06,YYAMYQENVAQTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:07,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:08,YYAMYQENVAQTDVDTLYIIYRDYTWAERAYRWY\nHLA-A*11:09,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:10,YYAMYRNNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:11,YYAMYLQNVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:12,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:13,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:14,YYAMYQENVAQTDVDTLYIIYRDYTWARQAYRWY\nHLA-A*11:15,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:16,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:17,YYAMYQENMAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A*11:18,YYAMYQENVAHTHVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:19,YYAMYQENVAHTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:20,YYAMYQENVAQTDVDTLYIIYRDYTWAEQAYRWY\nHLA-A*11:22,YYAMYQENVAQTDVDTLYIIYPDYTWAAQAYRWY\nHLA-A*11:23,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:24,YYAMYQENVAQTDVDTLYIIYRDYTWAALAYRWY\nHLA-A*11:25,YYAMYQENVAQTDVDTLYIIYRDYTWAELAYRWY\nHLA-A*11:26,YYAMYQENVAQTDVDTLYIMYRDYTWAAQAYRWY\nHLA-A*11:27,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYTGY\nHLA-A*11:29,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:30,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:31,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYRWY\nHLA-A*11:32,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:33,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:34,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:35,YYAMYQENVAQTDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*11:36,YYAMYQENVAQTDVDTLYIICRDYTWAAQAYRWY\nHLA-A*11:37,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:38,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A*11:39,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRGY\nHLA-A*11:40,YYAMYQENVAHTDANTLYIIYRDYTWAAQAYRWY\nHLA-A*11:41,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:42,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:43,YTAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:44,YYAMYQENVAQTDVDTLYIIYRDYTWAARAYRWY\nHLA-A*11:45,YYAMYQENVAQTDADTLYIIYRDYTWAAQAYRWY\nHLA-A*11:46,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:47,YHAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:48,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:49,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:51,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:53,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:54,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:55,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:56,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:57,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:58,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:59,YYAMYQENVAQTDVDTLYIIYGDYTWAAQAYRWY\nHLA-A*11:60,YYAMYQENVAQTDVDTLYIIYRDYTWAVQAYRWY\nHLA-A*11:61,YYAMYQENAAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:62,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:63,YYAMYQENVAQTDVDTLYIIYRDYTWAAQAYRWY\nHLA-A*11:64,YYAMYQENVAQTDVDTLHIIYRDYTWAAQAYRWY\nHLA-A*23:01,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A*23:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:04,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A*23:05,CSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:06,Y
SAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:09,YSAMYQENMAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYRGY\nHLA-A*23:12,YSAMYEEKVAHTHENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:13,YSAMYEEKVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:14,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:15,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:16,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:17,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:18,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:24,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:25,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*23:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:02,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:03,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:04,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:05,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:06,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTGY\nHLA-A*24:07,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:08,YSAMYGEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:10,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRWY\nHLA-A*24:13,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:14,YSAMYEEKVAHTDENIAYVRYHYYTWAVQAYTGY\nHLA-A*24:15,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A*24:17,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A*24:18,YSAMYEEKVAHTDENIAYLMFHYYTWAELAYTWY\nHLA-A*24:19,YSAMYEEKVAQTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:20,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:21,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:22,YSAMYEEKVAHTDENIAYLMFHYYTWAVWVYTWY\nHLA-A*24:23,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:24,YSAMYRNNVAQTDENIAYLMFHYYTWAVLAYTGY\nHLA-A*24:25,CSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:26,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:27,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:28,YSAMYEEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:29,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:30,YSAMYEEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:31,YSAMYEQKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:32,YSAMYEEKVAHTDESIAYLMFHYYTWAVQAYTGY\nHLA-A*24:33,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:34,YSAMYEEKVAHIDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:35,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:37,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:38,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:39,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:41,YSAMYEEKVAHTDENIAYLMFRDYTWAVQAYTGY\nHLA-A*24:42,YSAMYGEKVAHTHENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:43,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:44,YSAMYEEKVAHTDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:46,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYRGY\nHLA-A*24:47,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:49,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:50,YYAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:51,YSAMYEEKVAHTDENIAYLIYHYYTWAVQAYTGY\nHLA-A*24:52,YSAMYEEKVAHTDENIAYLRFHYYTWAVQAYTGY\nHLA-A*24:53,YSAMYEEKVAHTDENIAYLMYHYYTWAVQAYTGY\nHLA-A*24:54,YSAMYEEKVAHTDENIAYLMFHYYTWAVQPYTGY\nHLA-A*24:55,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYEGY\nHLA-A*24:56,YSAMYEEKVAHTDENIAYLMFHYYTWAEQAYTGY\nHLA-A*24:57,YSAMYEEKVAHTDENIAYIMYHYYTWAVQAYTGY\nHLA-A*24:58,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:59,YSAMYEEKVAHTDENIAYLMFHYYTWAAQAYTGY\nHLA-A*24:61,YSAMYEEKVAHTDEKIAYLMFHYYTWAVQAYTGY\nHLA-A*24:62,YSAMYEEKVAHTDENIAYLMFQDYTWAVQAYTGY\nHLA-A*24:63,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:64,YSAMYEEKVAHTDENIAYLWIHYYTWAVQAYTGY\nHLA-A*24:66,YSAMYEEKVAHTDENIAYLMFEHYTWAVQAYTGY\nHLA-A*24:67,YSAMYRNNVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:68,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:69,YSAM
YEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:70,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:71,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:72,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:73,YSAMYEEKVAHTDENIAYLMFDYYTWAVQAYTGY\nHLA-A*24:74,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:75,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:76,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:77,YSAMYQEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:78,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:79,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:80,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:81,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:82,YTAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:85,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:87,YSAMYEEKVAHTDENIAYLMFHYYTWAVRAYTGY\nHLA-A*24:88,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:89,YSAMYGEKVAHTHVDTLYLMFHYYTWAVQAYTGY\nHLA-A*24:91,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:92,YSAMYEEKVAHTDENIAYIIYHYYTWAVQAYTGY\nHLA-A*24:93,YSAMYEEKVAHTDENIAYVMFHYYTWAVQAYTGY\nHLA-A*24:94,YSAMYEEKVAHTDENIAYLMFHYYTWAVLAYTWY\nHLA-A*24:95,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:96,YSAMYEEKVAHTDENIAYLMFNFYTWAVQAYTGY\nHLA-A*24:97,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:98,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:99,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:100,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:101,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:102,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:103,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:104,YFAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:105,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:106,YSAMYEEKVAHTDENIAYLMFDDYTWAVQAYTGY\nHLA-A*24:107,YSAMYEEKVAHTDENIAYLMFHYYTWAVHAYTGY\nHLA-A*24:108,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:109,YSAMYEEKVAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:110,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:111,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:112,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:113,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:114,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:115,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:116,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:117,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:118,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:119,YSAMYEEKVAHADENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:120,YSAMYEEKVAHTDENIAYIMFHYYTWAVQAYTGY\nHLA-A*24:121,YSAMYEEKVAHTDENIAYLMFHSYTWAVQAYTGY\nHLA-A*24:122,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:123,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:124,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:125,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTWY\nHLA-A*24:126,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:127,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:128,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:129,YSAMYQENMAHTDANTLYLMFHYYTWAVQAYTGY\nHLA-A*24:130,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:131,YSAMYEEKVAQTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:133,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:134,YSAMYEEKVAHTDENIAYLMFHYYPWAVQAYTGY\nHLA-A*24:135,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:136,YSAMYEEKVAHTDENIAYLMFHYYTWVVQAYTGY\nHLA-A*24:137,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:138,YSAMYEEKVAHTDENIAYLMFHYYTWAVWAYTWY\nHLA-A*24:139,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:140,YSTMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:141,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:142,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*24:143,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYLGY\nHLA-A*24:144,YSAMYEEKVAHTDENIAYLMFHYYTWAVQAYTGY\nHLA-A*25:01,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:02,YYAMYRNNVAQTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:03,YFAMYRNNVAH
TDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:04,YYAMYRNNVAHTDESIAYIRYQDYTWAEQAYRWY\nHLA-A*25:05,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:06,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYTWY\nHLA-A*25:07,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:08,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:09,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:10,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:11,YYAMYRNNVAHTHESIAYIRYQDYTWAEWAYRWY\nHLA-A*25:13,YYAMYRNNVAHTDESIAYIRYQDYTWAEWAYRWY\nHLA-A*26:01,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:02,YYAMYRNNVAHTDANTLYIRYQNYTWAEWAYRWY\nHLA-A*26:03,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:04,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYLWY\nHLA-A*26:05,YYAMYRNNVAHTDENTLYIRYQDYTWAEWAYRWY\nHLA-A*26:06,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:07,YYAMYGEKVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:08,YYAMYRNNVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A*26:09,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYTWY\nHLA-A*26:10,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:12,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A*26:13,YYAMYRNNVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:14,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:15,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:16,YSAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:17,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:18,YYAMYRNNVAHTDANTLYIRYQDYTWAVWAYRWY\nHLA-A*26:19,YYAMYQENVAQTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:20,YFAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:21,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:22,YYAMYRNNVAHTDANTLYVRYQDYTWAEWAYRWY\nHLA-A*26:23,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:24,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:26,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:27,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:28,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:29,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A*26:30,YYAMYRNNVAHTHVDTLYIRYQDYTWAEWAYRWY\nHLA-A*26:31,YYAMYPNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:32,YYAMYRNNVAHTDANTLYMVYQDYTWAEWAYRWY\nHLA-A*26:33,YYAMYRNNVAQIHANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:34,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYWWY\nHLA-A*26:35,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:36,YYAMYRNNVAHTHANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:37,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:38,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:39,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:40,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:41,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:42,YYAIYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:43,YYAMYRNNVAHTDANTLYIRYQDYTWAELAYRWY\nHLA-A*26:45,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:46,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:47,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*26:48,YYAMYRNKVAHTDANTLYIRYQDYTWAEQAYRWY\nHLA-A*26:49,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRGY\nHLA-A*26:50,YYAMYRNNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*29:01,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:02,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:03,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTGY\nHLA-A*29:04,YTAMYLQHVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:05,YTAMYLQNVAQTDANTLYIMYRDYTWAEQAYTWY\nHLA-A*29:06,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:07,YTAMYLQNVAQTDANTLYLMFRDYTWAVLAYTWY\nHLA-A*29:09,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:10,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:11,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:12,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:13,YTAMYLQNVAQTDESIAYIMYRDYTWAVLAYTWY\nHLA-A*29:14,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:15,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:16,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:17,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:18,YTAMYQENVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:19,YTAMYLQNVAHTHV
DTLYIMYRDYTWAVLAYTWY\nHLA-A*29:20,YTAMYLQNVAHTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:21,YTAMYLQNVAQTDANTLYIMYRDYTWAVLAYTWY\nHLA-A*29:22,YTAMYLQNVAQTDANTLYVRYRDYTWAVLAYTWY\nHLA-A*30:01,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:02,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:03,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:04,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A*30:06,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTWY\nHLA-A*30:07,YSAMYEEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:08,YYAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:09,YSAMYQENVAHTDENTLYIIYEHYTWAVLAYTWY\nHLA-A*30:10,YSAMYQENVAHTDENTLYIIHEHYTWARLAYTWY\nHLA-A*30:11,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:12,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:13,YSAMYQENVAHTHVDTLYIIYEHYTWARLAYTWY\nHLA-A*30:15,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:16,YSAMYQENVAQTHVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:17,YSAMYQENVAQTDVDTLYIIYEHYTWAVWAYTWY\nHLA-A*30:18,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:19,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:20,YSAMYQENVAQTEVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:22,YSAMYGEKVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:23,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:24,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:25,YSAMYQENVAQTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:26,YSAMYQENVAQTDVDTLYIIYRDYTWAWLAYTWY\nHLA-A*30:28,YSAMYQENVAHTDENTLYIVYEHYTWARLAYTWY\nHLA-A*30:29,YSAMYQENVAHTDENTLYIIYEHYTWAVWAYTSY\nHLA-A*30:30,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:31,YSAMYQENVARTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:32,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:33,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:34,YSAMYQENVAHTDENTLYIIYEHYTWARLAYTWY\nHLA-A*30:35,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:36,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:37,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:38,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:39,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:40,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*30:41,YSAMYQENVAQTDVDTLYIIYEHYTWAWLAYTWY\nHLA-A*31:01,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:02,YTAMYQEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:03,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*31:04,YTAMYQENVAHIDVDTLYIIYRDYTWAVLAYTWY\nHLA-A*31:05,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A*31:06,YTAMYQENVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*31:07,YTAMYQEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:08,YTAMYEEKVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:09,YTAMYQENVGHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:10,YTAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*31:11,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:12,YTAMYQENVAHIDVDTLYIKYQDYTWAVLAYTWY\nHLA-A*31:13,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:15,YTAMYQENVARIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:16,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:17,YTAMYQENVAHINVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:18,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTRY\nHLA-A*31:19,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:20,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:21,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:22,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:23,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:24,YTAMYQENVAHIDVDTLYIMYQDYTWAAQAYRWY\nHLA-A*31:25,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYRWY\nHLA-A*31:26,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:27,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:28,YTAMYQENVTHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:29,YTAMYQENVAHIDVDTLYLMFQDYTWAVLAYTWY\nHLA-A*31:30,YTAMYQENVAHIDVDTLYIMYQDYTWAVWAYTWY\nHLA-A*31:31,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:32,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:33,YTAMYQENVAHIDGDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:34,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:35,YTAMYQENVAHIDVDTL
YIMYQDYTWAVLAYTWY\nHLA-A*31:36,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*31:37,YTAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*32:01,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:02,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A*32:03,YFAMYQENVAHTDENIAYIMYQDYTWAVLAYTWY\nHLA-A*32:04,YFAMYQENVAHTDESIAYIIYRDYTWAELAYTWY\nHLA-A*32:05,YFAMYQEKVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:06,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:07,YSAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:08,YFAMYQENVAHTHESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:09,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTRY\nHLA-A*32:10,YFAMYQENVAHTDESIAYIMYQDYTWAEWAYTWY\nHLA-A*32:12,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:13,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTGY\nHLA-A*32:14,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:15,YFAMYRNNVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:16,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:17,YFAMYQENVAQTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:18,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:20,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:21,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:22,YFAMYQENVAHTDESIAYIMYQDYTWAVQAYTWY\nHLA-A*32:23,YFAMYQENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:24,YFAMYQENMAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*32:25,YFAMYHENVAHTDESIAYIMYQDYTWAVLAYTWY\nHLA-A*33:01,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:03,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:04,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:05,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:06,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:07,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:08,YTAMYGEKVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:09,YTAMYGENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:10,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTGY\nHLA-A*33:11,YTAMYRNNVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:12,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:13,YTAMYRNNVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A*33:14,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:15,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:16,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:17,YTAMYRNNVAHIDADTLYIMYQDYTWAVLAYTWY\nHLA-A*33:18,YTAMYRNNVAHIDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*33:19,YTAMYRNNVAHIDVDTLYLMFHYYTWAVQAYTGY\nHLA-A*33:20,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:21,YTAMYEENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:22,YTAMYRNNVAHIDVDTLYVRYQDYTWAVLAYTWY\nHLA-A*33:23,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:24,YTAMYRNNVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:25,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:26,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:27,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWH\nHLA-A*33:28,YTAMYRNNVAHIDVDTLYIMYQDYTWAELAYTWY\nHLA-A*33:29,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:30,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*33:31,YTAMYRNNVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*34:01,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A*34:02,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:03,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:04,YYAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:05,YYAMYRNKVAQTDVDTLYIRYQDYTWAEWAYTWY\nHLA-A*34:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYTWY\nHLA-A*34:07,YYAMYRNNVSQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*34:08,YFAMYRNNVAQTDVDTLYIIYRDYTWAELAYTWY\nHLA-A*36:01,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A*36:02,YFAMYQENMAHTDANTLYIIYRDYTWVARAYTWY\nHLA-A*36:03,YFAMYQENMAHTDANTLYLMYRDYTWVARVYTWY\nHLA-A*36:04,YFAMYQENMAHTDANTLYIIYRDYTWVARVYRWY\nHLA-A*36:05,YFAMYQENMAHTDANTLYIIYRDYTWVARVYTWY\nHLA-A*43:01,YYAMYLQNVAHTDANTLYIRYQDYTWAEWAYRWY\nHLA-A*66:01,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:02,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A*66:03,YYAMYRNNVAHTDVDTLYIRYQDYTWAEWAYEWY\nHLA-A*66:04,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWH\nHLA-A*66:05,YYAMYRNNVAHTDVDTLYIR
YQDYTWAEWAYRWY\nHLA-A*66:06,YYAMYRNNVAQTDVDTLYIRYQDYTWAVLAYRWY\nHLA-A*66:07,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:08,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:09,YYAMYRNNVAQTDVDTLYVRYQDYTWAEWAYRWY\nHLA-A*66:10,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRGY\nHLA-A*66:11,YYAMYRNNVAQTDADTLYIRYQDYTWAEWAYRWY\nHLA-A*66:12,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:13,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:14,YYAMYRNNVAQTDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*66:15,YYAMYRNNVAHIDVDTLYIRYQDYTWAEWAYRWY\nHLA-A*68:01,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:02,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:03,YYAMYRNNVAHTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:04,YYAMYRNNVAHIDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:05,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:06,YYAMYRNNVAQTDVDTLYIMYEHYTWAVWAYTWY\nHLA-A*68:07,YYAMYRNNVAQTDVDTLYIMYRHYTWAVWAYTWY\nHLA-A*68:08,YYAMYRNNVAQTDVDTLYIMYRDYTWAVLAYTWY\nHLA-A*68:09,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTWY\nHLA-A*68:10,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:12,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:13,YYAMYRENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:14,YYAMYEENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:15,YYAMYRNNVAHTHVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:16,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:17,YYAMYRNNVAQTDVDTLYIMYRVYTWAVWAYTWY\nHLA-A*68:19,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:20,YYAMYRNNVAHTHVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:21,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:22,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:23,YYAMYRNNVAQTDVDTLYIRYRDYTWAVWAYTWY\nHLA-A*68:24,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:25,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:26,YYAMYRNNVAQTDVDTLYIMYRDYTWAVQAYTGY\nHLA-A*68:27,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:28,YYAMYRNNVAQTDVDTLYIRYHYYTWAVRAYTWY\nHLA-A*68:29,YTAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:30,YYAMYGENVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:31,YYAMYRNNVAHTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:32,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:33,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:34,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYMWY\nHLA-A*68:35,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:36,YYAMYRNNVAQTDENIAYIMYRDYTWAVWAYTWY\nHLA-A*68:37,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:38,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:39,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:40,YYAMYRNNVGQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:41,YYAMYRNNVAQTDVDTLYIMYRDYTWVVWAYTWY\nHLA-A*68:42,YYAMYRNNVAQTDVDTLYIMYRDYTWAEWAYTWY\nHLA-A*68:43,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:44,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:45,YSAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:46,YYAMYRNNVAQTDVNTLYIMYRDYTWAVWAYTWY\nHLA-A*68:47,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:48,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:50,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:51,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:52,YYAMYRNNVAQTDVDTLYIMYRDYTWAVWAYTWY\nHLA-A*68:53,YYAMYRNNVAQTDVDTLYIRYHYYTWAVWAYTWY\nHLA-A*68:54,YYAMYRNNVAQTDVDTLYIRYHYYTWAEWAYTWY\nHLA-A*69:01,YYAMYRNNVAQTDVDTLYVRYHYYTWAVLAYTWY\nHLA-A*74:01,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:02,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:03,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:04,YFAMYGEKVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:05,YFAMYQENVAHADVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:06,YFAMYQENVAHTHVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:07,YFAMYQENVAHIDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:08,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:09,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:10,YFAMYQENVAHTDANTLYIMYQDYTWAVLAYTWY\nHLA-A*74:11,YFAMYQENVAHTDVDTLYIMYQDYTWAVLAYTWY\nHLA-A*74:13,YFAMYQENVAQTDVDTLYIMYQD
YTWAVLAYTWY\nHLA-A*80:01,YFAMYEENVAHTNANTLYIIYRDYTWARLAYEGY\nHLA-A*80:02,YFAMYEENVAHTDVDTLYIIYRDYTWARLAYEGY\nHLA-B*07:02,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:03,YYSEYRNIYTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:04,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYEWY\nHLA-B*07:05,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:06,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:07,YYSEYRNIYAQTDESNLYLRYDYYTWAERAYEWY\nHLA-B*07:08,YYSEYRNIFTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:09,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B*07:10,YYSEYRNICAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:11,YYSEYRNIYAQTDENNLYLSYDSYTWAERAYEWY\nHLA-B*07:12,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:13,YYSGYREKYRQADVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:14,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:15,YYSEYRNIYAQADVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:16,YYSEYRNIYTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:17,YYSEYRNIYAQTDESNLYLSYDSYTWAERAYEWY\nHLA-B*07:18,YYSEYRNIYAQTDESNLYIRYDYYTWAERAYEWY\nHLA-B*07:19,YYSEYRNIYAQTDESNLYLSYDYYTWAEDAYTWY\nHLA-B*07:20,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLSY\nHLA-B*07:21,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:22,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:23,YYSEYRNIYAQTDESNLHLSYDYYTWAERAYEWY\nHLA-B*07:24,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B*07:25,YYSEYRNIYAQTDESNLYLSYDYYTWAVDAYEWY\nHLA-B*07:26,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYEWY\nHLA-B*07:27,YYSEYRNISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B*07:28,YYSEYRNIYAQTDESNLYLSYDDYTWAERAYEWY\nHLA-B*07:29,YDSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:30,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:31,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYTWY\nHLA-B*07:32,YYSEYRNIFTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:33,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:34,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYTWY\nHLA-B*07:35,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:36,YYSEYRNIYAQTDENIAYLSYDYYTWAERAYEWY\nHLA-B*07:37,YYSEYRNIYANTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:38,YYSEYRNIFTNTYENIAYLSYDYYTWAERAYEWY\nHLA-B*07:39,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:40,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:41,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:42,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:43,YYSEYRNIYAQTDESNLYLSYDYYTWAELAYTWY\nHLA-B*07:44,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:45,YYSEYRNIYAQTDESNLYLSYDYYTWAERTYEWY\nHLA-B*07:46,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:47,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:48,YYSEYRNIYAQTDESNLYLSYDYYTWAVLAYEWY\nHLA-B*07:50,YYSEYRNISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:51,YYSEYRNIYAQTDESNLYLSYDYYTWAARAYEWY\nHLA-B*07:52,YYSEYRNIYAQTDESNLYLSYDYYTWAERVYEWY\nHLA-B*07:53,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:54,YYSEYREIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:55,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYEWY\nHLA-B*07:56,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:57,YYSEYRNIYAQTDENNLYLSYDYYTWAERAYEWY\nHLA-B*07:58,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:59,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:60,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYLWY\nHLA-B*07:61,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:62,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:63,YYSDYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:64,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEGY\nHLA-B*07:65,YYATYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:66,YYSEYRNIYAQTDESNLYLSYDYYTWAEQAYEWY\nHLA-B*07:68,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:69,YYSEYRNICTNTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:70,YCSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:71,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:72,YYAEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:73,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:74,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:75,YYSEYRNIYAQTYENNLYLSYDYYTW
AERAYEWY\nHLA-B*07:76,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:77,YYSEYRNIYAQTDESNLYLRSDYYTWAERAYEWY\nHLA-B*07:78,YYSEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B*07:79,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:80,YYSEYRNIYAQTDESNLYLSYNYYTWAELAYEWY\nHLA-B*07:81,YYSEYRNIYAQTDESIAYLSYDYYTWAERAYEWY\nHLA-B*07:82,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:83,YYSEYRNIFAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:84,YYSEYRNIYAQTDESNLYWTYDYYTWAERAYEWY\nHLA-B*07:85,YYSEYRNICTNTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:86,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:87,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:88,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:89,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:90,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:91,YYSEYRNIYAQTYESNLYLSYDYYTWAERAYEWY\nHLA-B*07:92,YYSEYRNIYAQTDVSNLYLSYDYYTWAERAYEWY\nHLA-B*07:93,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:94,YYSEYWNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:95,YYSEYRNIYAQTDESNLYFSYDYYTWAERAYEWY\nHLA-B*07:96,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:97,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:98,YYSEYRNIYAQTDESNLYLSYDYYTCAERAYEWY\nHLA-B*07:99,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:100,YYSEYRNIYAQTDESNLYLSYDYYTWAEWAYLWY\nHLA-B*07:101,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:102,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:103,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:104,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:105,YYSEYRNIYAQTVESNLYLSYNYYTWAERAYEWY\nHLA-B*07:106,YYSEYRNIYAQTDESNLYLSYDYYTRAERAYEWY\nHLA-B*07:107,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:108,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:109,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:110,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:112,YYSEYRNIYAQTDESNLYLSYNYYTWAERAYEWY\nHLA-B*07:113,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:114,YYSEYRNIYAQTDESNLYLSYDYYTWAERAYEWY\nHLA-B*07:115,YYSEYRNIYAQTDESNLYLSYNFYTWAERAYEWY\nHLA-B*08:01,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:02,YDSEYRNIFTNTDENTAYLSYNYYTWAVDAYTWY\nHLA-B*08:03,YDSEYRNIFTNTYENIAYLSYNYYTWAVDAYTWY\nHLA-B*08:04,YDSEYRNISTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:05,YDSEYRNTFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:07,YDSEYRNIFTNTDESNLYLSYDYYTWAVDAYTWY\nHLA-B*08:09,YDSEYRNIFTNTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*08:10,YDSEYRDIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:11,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYAWY\nHLA-B*08:12,YDSEYRNIFTNTDESNLYLRYNYYTWAVDAYTWY\nHLA-B*08:13,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYTWY\nHLA-B*08:14,YDSEYRNIFTNTDESNLYLSYHDYTWAVDAYTWY\nHLA-B*08:15,YDSEYRNIFTNTDVSNLYLSYNYYTWAVDAYTWY\nHLA-B*08:16,YDSEYRNIFTNADESNLYLRYNYYTWAVDAYTWY\nHLA-B*08:17,YDSEYREISTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B*08:18,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:20,YDSEYRNIFTNTDESNLYLSYNYYTWAERAYTWY\nHLA-B*08:21,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYLWY\nHLA-B*08:22,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:23,YDSEYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:24,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:25,YDSEYRNIFTNTDESNLYLSYNYYTWAVLAYLWY\nHLA-B*08:26,YYAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:27,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:28,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*08:29,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:31,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:32,YDSTYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:33,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:34,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:35,YDSEYRNIFTNTDESNLYLSYNSYTWAVDAYTWY\nHLA-B*08:36,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:37,YDSEYRNIFTNTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*08:38,YDSEYREIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:39,YDSEYRNIFTNTDE
SNLYLSYNYYTWAVDAYTWY\nHLA-B*08:40,YDSEYRNIFTNTDESNLYLSYNYYTWAVRAYEWY\nHLA-B*08:41,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:42,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:43,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:44,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:45,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:46,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:47,YDSEYRNIFTNTDENNLYLSYNYYTWAVDAYTWY\nHLA-B*08:48,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:49,YDSEYRNIFTNTDESNLYIRSNFYTWAVDAYTWY\nHLA-B*08:50,YYSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:51,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:52,YDSEYRNIFTNTDESIAYLSYNYYTWAVDAYTWY\nHLA-B*08:53,YDSEYRNIFTNTDESNLYLSYNYYTWAEDAYTWY\nHLA-B*08:54,YDSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:55,YDSEYRNIFTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B*08:56,YDAEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:57,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:58,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:59,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:60,YDSEYRNIFTNTDESNLYISYNYYTWAVDAYTWY\nHLA-B*08:61,YDSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*08:62,YHSEYRNIFTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*13:01,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:02,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:03,YYTMYREISTNTYENTAYWTYNLYTWAVLAYLWY\nHLA-B*13:04,YYTMYREISTNTYENTAYWTYDSYTWAVLAYLWY\nHLA-B*13:06,YYTMYREISTNTYENTAYIRYNLYTWAVLAYTWY\nHLA-B*13:09,YYTMYREISTNTYESNLYWTYNLYTWAVLAYEWY\nHLA-B*13:10,YYTMYREISTNTYENTAYLRYDSYTWAVLAYEWY\nHLA-B*13:11,YYTMYREISTNTYENTAYLRYNLYTWAVLAYEWY\nHLA-B*13:12,YYTMYREISTNTYENTAYIRYNLYTWAVLAYGWY\nHLA-B*13:13,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B*13:14,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:15,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWH\nHLA-B*13:16,YYTMYREISTNTYENTAYWTYNLYTWAELAYEWY\nHLA-B*13:17,YYAMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:18,YYTMYREISTNTYENTAYWTYNLYTWAVRAYEWY\nHLA-B*13:19,YYTMYREVSTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:20,YYTMYREISTNTYENTAYIRYNLYTWAELAYEWY\nHLA-B*13:21,YYTMYREISTNTYENTAYIRYNYYTWAVLAYEWY\nHLA-B*13:22,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:23,YHTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:25,YYTMYREISTNTYESTAYIRYNLYTWAVLAYEWY\nHLA-B*13:26,YYTMYREISTNTYENTAYIRYDSYTWAVLAYEWY\nHLA-B*13:27,YYTMYREISTNTYENTAYWTFNLYTWAVLAYEWY\nHLA-B*13:28,YYTMYREISTNTYENTACIRYNLYTWAVLAYEWY\nHLA-B*13:29,YYTMYREISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:30,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:31,YYTMYREISTNTYENTAYWTYNLYTWAEWAYEWY\nHLA-B*13:32,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:33,YYAMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:34,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:35,YYTMYREISTNTYENTAYWTYDYYTWAVLAYEWY\nHLA-B*13:36,YYTMYRNISTNTYENTAYIRYNLYTWAVLAYEWY\nHLA-B*13:37,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:38,YYTMYREISTNTYENTAYWTYNLYTWAVLAYEWY\nHLA-B*13:39,YYTMYREISTNTYENNLYIRYNLYTWAVLAYEWY\nHLA-B*14:01,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:02,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:03,YYSEYRNICTNTDESNLYLWYNFYTWAERAYTWH\nHLA-B*14:04,HYSEYRNNCTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:05,YYSEYRNICTNTDESNLYLSYNFYTWAELAYTWH\nHLA-B*14:06,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B*14:08,YYSEYRNICTNTDESNLYLRYNFYTWAELAYTWH\nHLA-B*14:09,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:10,YYSEYRNICTNTDESNLYIRYNFYTWAELAYTWH\nHLA-B*14:11,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:12,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:13,YYSEYRNICTNTDESNLYLSYNYYTWAELAYTWH\nHLA-B*14:14,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B*14:15,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:16,YYSEYRNICTNTDESNLYLWYNFYTWAELAYTWH\nHLA-B*14:17,YYSEYRNICTNTDESNL
YLWYNFYTWAELAYTWH\nHLA-B*14:18,YYSEYRNICTNTYESNLYLWYNFYTWAELAYTWH\nHLA-B*15:01,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:02,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:03,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:04,YYAMYREISTNTYESNLYWTYDSYTWAEWAYLWY\nHLA-B*15:05,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:06,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B*15:07,YYAMYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*15:08,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:09,YYSEYRNICTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B*15:10,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWY\nHLA-B*15:11,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:12,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B*15:13,YYAMYRNISTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B*15:14,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLSY\nHLA-B*15:15,YYAMYRNISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:16,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:17,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:18,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:19,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLGY\nHLA-B*15:20,YYAMYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*15:21,YYAMYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:23,YYSEYRNICTNTYENIAYLRYDSYTWAELAYLWY\nHLA-B*15:24,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B*15:25,YYAMYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:27,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B*15:28,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:29,YYSEYRNIFTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:30,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*15:31,YYAMYRNISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:32,YYAMYREISTNTYESNLYLRSDSYTWAEWAYLWY\nHLA-B*15:33,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:34,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:35,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B*15:36,YYAMYREISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B*15:37,YYSEYRNICTNTYESNLYLRYDYYTWAELAYLWH\nHLA-B*15:38,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWH\nHLA-B*15:39,YYAMYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:40,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B*15:42,YYAMYREISTNTYESNLYWTYNLYTWAELAYTWY\nHLA-B*15:43,YYAMYREISTNTYEDTLYLRYDSYTWAEWAYLWY\nHLA-B*15:44,YYAMYRNICTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B*15:45,YYAMYREISTNTYESNLYLSYDYYTWAEWAYLWY\nHLA-B*15:46,YYAKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:47,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*15:48,YYAMYREISTNTYESNLYLRYNYYTWAVLTYLWY\nHLA-B*15:49,YYSEYREISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*15:50,YYAMYREISTNTYESNLYLRYDSYTWAEWAYTWY\nHLA-B*15:51,YYSEYRNICTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B*15:52,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*15:53,YYTKYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:54,YYSEYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:55,YYAMYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*15:56,YYAMYREIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:57,YYAMYREISTNTYVNNLYLRYDSYTWAEWAYLWY\nHLA-B*15:58,YYAMYREISTNTYESNLYLRYNFYTWAEWAYLWY\nHLA-B*15:60,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:61,YYSEYREISTNTDESNLYLRYDSYTWAELAYLWY\nHLA-B*15:62,YYSEYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:63,YYAMYREISTNTYESNLYLRYDYYTWAEWAYLWY\nHLA-B*15:64,YYSEYRNISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:65,YYAMYREISTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B*15:66,YYAMYREICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:67,YYAMYRENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:68,YYSEYREISTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*15:69,YYSEYREISTNTYESNLYLRYDSYTWAELTYTWY\nHLA-B*15:70,YYAMYREISTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:71,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:72,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:73,YYAMYREISTNTYESNLYLRYNLYTWAEWAYLWY\nHLA-B*15:74,YYSEYREISINTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:75,YYAMYREISTNTYESNLYLRYDSYTWAQWAYLWY\nHLA-B*15:76,YYAMYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:77,YYAMYREISTNTYESNLYIR
YDDYTWAEWAYLWY\nHLA-B*15:78,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:80,YYSEYRNICTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:81,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:82,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:83,YYAMYREISTNTYESNLYWTYNYYTWAVDAYTWY\nHLA-B*15:84,YYAMYREISTNTYESNLYLRFDSYTWAVRAYLWY\nHLA-B*15:85,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:86,YYAMYREISTNTYESNLYLRYNLYTWAVLAYTWY\nHLA-B*15:87,YYAMYREISTNTYESIAYLRYDSYTWAEWAYLWY\nHLA-B*15:88,YYAMYRNISTNTYESNLYIRYDSYTWATLAYLWY\nHLA-B*15:89,YYAMYRNISTNTYENTAYIRYDSYTWAELAYLWY\nHLA-B*15:90,YYSEYRNICTNTYESNLYLRYDYYTWAELVYLWY\nHLA-B*15:91,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLSY\nHLA-B*15:92,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:93,YYSEYRNICTNTYESNLYLRYDSYTWAELAYTWY\nHLA-B*15:95,YYAMYQENMASTYENIAYWRYDSYTWAELAYLWY\nHLA-B*15:96,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:97,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:98,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:99,YYSEYRNICTNTYESNLYLRYDYYTWAERAYLWY\nHLA-B*15:101,YYAMYREIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:102,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:103,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:104,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:105,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:106,YYAKYREISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:107,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:108,YYSEYRNICTNTYESNLYLRYDSYTWAELTYLWY\nHLA-B*15:109,YYAMYREISTNTYESNLYLRFDSYTWAEWAYLWY\nHLA-B*15:110,YYAMYREISTNTYESNLYLRCDSYTWAEWAYLWY\nHLA-B*15:112,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:113,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:114,YYSEYRNICTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*15:115,YYSEYRNICTNTYESTAYLRYDSYTWAELAYLWY\nHLA-B*15:116,YYAMYREISTNTYESNLYLRYDSYSLAEWAYLWY\nHLA-B*15:117,YYAMYREISTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B*15:118,YYAMYREISTNTYESNLYLMYDSYTWAEWAYLWY\nHLA-B*15:119,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:120,YYAMYRDISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:121,YYAMYRNISTNTYESNLYIRYDSYTWAELAYTWY\nHLA-B*15:122,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:123,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:124,YYSEYRNICTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*15:125,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:126,YYAMYREISTNTYESNLYLSYDSCTWAEWAYLWY\nHLA-B*15:127,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:128,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:129,YYAMYREISTNTYESNLYLNYDSYTWAEWAYLWY\nHLA-B*15:131,YYSEYREISTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B*15:132,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:133,YYSEYRNICTNTYESNLYLRYDFYTWAELAYLWY\nHLA-B*15:134,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:135,YYAMYREISTNTYENNLYLRYDSYTWAEWAYLWY\nHLA-B*15:136,YYAMYREISTNTYESNLYLRYDSYTWAVLTYLWY\nHLA-B*15:137,YYAMYREISTNTYESNLYWTYNFYTWAEWAYLWY\nHLA-B*15:138,YYAMYREISTNTYESNLYLRYDSYTWAELAYEWY\nHLA-B*15:139,YYAMYRNISANTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:140,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:141,YYAMYREISTNTYESNLYLTYDSYTWAEWAYLWY\nHLA-B*15:142,YDAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:143,YYAKYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:144,YYAMYRNISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:145,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:146,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:147,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:148,YYAMYRNIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:150,YYAMYREISTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*15:151,YYSEYREISTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*15:152,YYAMYREIYTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:153,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:154,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:155,YYAMYREISTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B*15:156,YYSEYREISTNTYESNLY
LRYDSYTWAELAYLWY\nHLA-B*15:157,YYAMYREISTNTYENIAYLRYDSYTWAEWAYLWY\nHLA-B*15:158,YYSEYREISTNTYESNLFLRYDSYTWAELAYLWY\nHLA-B*15:159,YYAMYREISTNTYESNLHLRYDSYTWAEWAYLWY\nHLA-B*15:160,YYAMHREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:161,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLSY\nHLA-B*15:162,YYAMYRENMASTYENIAYLRYHDYTWAALAYLWY\nHLA-B*15:163,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:164,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:165,YYAMYREISTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*15:166,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:167,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:168,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:169,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:170,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:171,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:172,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:173,YYSEYREISTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:174,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:175,YHAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:176,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:177,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:178,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:179,YYAMYREISTNTYESNLYLRYDSYTWAVDAYLWY\nHLA-B*15:180,YDSEYRNIFTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:183,YYTMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:184,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:185,YYAMYREISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B*15:186,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B*15:187,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:188,YYAMYREISTNTYESNLYLRYNYYTWAVLAYTWY\nHLA-B*15:189,YYAMYRNICTNTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:191,YYAMYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:192,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:193,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:194,YYAMYRNISTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*15:195,YYAMYREISTNTYESNLYLRFDSYTWAELAYLWY\nHLA-B*15:196,YYAMYRENMASTYENIAYLRYHDYTWAELAYLWY\nHLA-B*15:197,YYSEYRNICTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B*15:198,YYSEYRNICTNTYESNLYLRYDSYTWAELAYLWY\nHLA-B*15:199,YYAMYREISTNTYESNLYLRYDSYTWAEDAYTWY\nHLA-B*15:200,YYSEYRNICTNTYESNLYLRYDSYTWATLAYLWY\nHLA-B*15:201,YYAMYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*15:202,YYATYREISTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*18:01,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:02,YHSTYRNISTNTYESNLYLNYDSYTWAVLAYTWH\nHLA-B*18:03,YHSTYRNISTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:04,YYATYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:05,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:06,YHSTYRNISTNTYVSNLYLRYDSYTWAVLAYTWH\nHLA-B*18:07,YHSTYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:08,YHSTYRNISTNTYESNLYLRCDSYTWAVLAYTWH\nHLA-B*18:09,YHSTYRNISTNTYENTAYLRYDSYTWAVLAYTWH\nHLA-B*18:10,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*18:11,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWY\nHLA-B*18:12,YHSTYREISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:13,YHSTYRNISTNTYESNLYLRYDSYTWAVRAYTWH\nHLA-B*18:14,YHSTYRNISTNTYESNLYLSYDSYTWAVLAYTWH\nHLA-B*18:15,YHSTYRNISTNTYESNLYLRYDSYTWAELAYTWH\nHLA-B*18:18,YHSTYRNISTNTYESNLYLRSDSYTWAVLAYTWH\nHLA-B*18:19,YHSTYRNISTNTYESNLYLRYDSYTWAEWAYTWH\nHLA-B*18:20,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:21,YHSTYRNISTNTYESNLYLRYDSYTWAERAYEWY\nHLA-B*18:22,YHSTYRNISTNTYESNLYISYDSYTWAVLAYTWH\nHLA-B*18:24,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:25,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:26,YHSTYRNISTNTYESNLYLRYNYYTWAVLAYTWH\nHLA-B*18:27,YHSTYRNISTNTYESNLYLMFDSYTWAVLAYTWH\nHLA-B*18:28,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:29,YHATYRNIFTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:30,YHSTYRNISTNTYESNLYLRYDSYTWAERAYTWH\nHLA-B*18:31,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:32,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:33,YHSTYRNICTNTYESNLYLRYDSYTW
AVLAYTWH\nHLA-B*18:34,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:35,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*18:36,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYLWH\nHLA-B*18:37,YHSEYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:38,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:39,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:40,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:41,YHSTYRNISTNTYESNLYLRYESYTWAVLAYTWH\nHLA-B*18:42,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:43,YYSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:44,YHSTYRNISTNTYESNLYLWYDSYTWAVLAYTWH\nHLA-B*18:45,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:46,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:47,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:48,YHSKYRNISTNTYESNLYLRYDSYTWAVLAYTWH\nHLA-B*18:49,YHSTYRNISTNTYENNLYLRYDSYTWAVLAYTWH\nHLA-B*18:50,YHSTYRNISTNTYESNLYLRYDSYTWAVLAYEWH\nHLA-B*27:01,YHTEYREICAKTYENTAYLNYHDYTWAVLAYEWY\nHLA-B*27:02,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B*27:03,YHTEHREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:04,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*27:05,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:06,YHTEYREICAKTDESTLYLNYDYYTWAELAYEWY\nHLA-B*27:07,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B*27:08,YHTEYREICAKTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:09,YHTEYREICAKTDEDTLYLNYHHYTWAVLAYEWY\nHLA-B*27:10,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B*27:11,YHTEYREICAKTDESTLYLSYNYYTWAVLAYEWY\nHLA-B*27:12,YHTEYREICTNTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:13,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:14,YHTEYREICAKTDEDTLYWTYHDYTWAVLAYEWY\nHLA-B*27:15,YHTEYREICAKTDESTLYLNYHDYTWAELAYTWY\nHLA-B*27:16,YHTEYREICTNTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:17,YHTEFREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:18,YHTEYREISTNTYESNLYLNYHDYTWAELAYEWY\nHLA-B*27:19,YHTEYREICAKTDEDTLYIRYHDYTWAVLAYEWY\nHLA-B*27:20,YHTEYREICAKTDESTLYLNYNYYTWAELAYEWY\nHLA-B*27:21,YHTEYREICAKTDESTLYLRYDYYTWAELAYEWY\nHLA-B*27:23,YHTEYRNIFTNTYESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:24,YHTEYREICAKTDESTLYLSYNYYSWAELAYEWY\nHLA-B*27:25,YHTEYREICAKTDESTLYLNYHDYTWAEWAYLWY\nHLA-B*27:26,YHTEYREICAQTDESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:27,YHTEYREICAKTDEDTLYLNYNYYTWAVLAYEWY\nHLA-B*27:28,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYTWH\nHLA-B*27:29,YHTEYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:30,YHTEYREICAKTDENIAYIRYHDYTWAVLAYEWY\nHLA-B*27:31,YHTEYREICAQTDESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:32,YHTEYREICAKTDEDTLYLSYHDYTWAVLAYEWY\nHLA-B*27:33,YHTEYREICAKTDESNLYLSYNYYTWAVLAYEWY\nHLA-B*27:34,YHTEYREICAKTDEDTLYLSYDYYTWAVLAYEWY\nHLA-B*27:35,YHTEYREICAKTDEDTLYLNYNFYTWAVLAYEWY\nHLA-B*27:36,YHTEYREICAKTDESTLYLNYHDYSLAVLAYEWY\nHLA-B*27:37,YYTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:38,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYLWY\nHLA-B*27:39,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:40,YHTEYREICAKTDESNLYLNYHDYTWAELAYEWY\nHLA-B*27:41,YHTEYREICAKTDEDTLYLNYDSYTWAVLAYEWY\nHLA-B*27:42,YHTEYREICAKTDEDNLYLNYHDYTWAVLAYEWY\nHLA-B*27:43,YHTEYREICAKTDEDTLYLSYNYYTWAVLAYEWY\nHLA-B*27:44,YHTEYREICAKTYESNLYLNYHDYTWAVLAYEWY\nHLA-B*27:45,YHTEYREICAKTDEDTLYLNYHDYTWAVRAYEWY\nHLA-B*27:46,YHTEYREICAKTDEDTLYLNYHYYTWAVLAYEWY\nHLA-B*27:47,YHTEYREICAKTDEDTLYLNYHDYTWAVDAYLSY\nHLA-B*27:48,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:49,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:50,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYESY\nHLA-B*27:51,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:52,YHTTYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:53,YHTEYREICAKTDEDIAYLNYHDYTWAVLAYEWY\nHLA-B*27:54,YHTEYREICAKTDEDTLYLNYHDYTWAELAYEWY\nHLA-B*27:55,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:56,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:57,YHTEYREICAKTDENIAYLNYHDYTWAVLAYEWY\nHLA-B*27:58,YHTEYREICAKTDEDTLYLNYHDYTWAVL
AYEWY\nHLA-B*27:60,YHTEYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:61,YHTEYREICAKTDESTLYLNYHDYTWAVLAYEWY\nHLA-B*27:62,YHTEYREICAKTDENIAYLNYHDYTWAVLAYTWH\nHLA-B*27:63,YHTEYREICAKTDESTLYLNYHDYTWAELAYLWY\nHLA-B*27:67,YHTMYREICAKTDEDTLYLNYHDYTWAVLAYEWY\nHLA-B*27:68,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*27:69,YHTEYREICAKTDESTLYLNYHDYTWAELAYEWY\nHLA-B*35:01,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:02,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:03,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:04,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:05,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:06,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B*35:07,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:08,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:09,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:10,YYATYREIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:11,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWY\nHLA-B*35:12,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:13,YYATYREIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:14,YYATYRNIFTNTYESNLYIRYDSYTWAEWAYLWY\nHLA-B*35:15,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYEWY\nHLA-B*35:16,YYATYREIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:17,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:18,YYATYRNIFTNTYESNLYIRYNYYTWAVRAYLWY\nHLA-B*35:19,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:20,YYATYRNISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:21,YYATYRNIFTNTYESNLYIRYDSYTWAELAYLWH\nHLA-B*35:22,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYLWY\nHLA-B*35:23,YYATYRNIFTNTYESNLYIRFDSYTWAVLAYLWY\nHLA-B*35:24,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWH\nHLA-B*35:25,YYSEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:26,YYAEYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:27,YYATYRNIFTNTYENNLYIRYDSYTWAVLAYLWY\nHLA-B*35:28,YYATYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:29,YYATYRNIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:30,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:31,YYATYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*35:32,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*35:33,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYEWY\nHLA-B*35:34,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B*35:35,YYATYRNIFTNTYESNLYIRYDSYTWAVLTYTWY\nHLA-B*35:36,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:37,YYATYRNIFTNTYESNLYWTYDSYTWAVLAYLWY\nHLA-B*35:38,YYATYRNIFTNTYESNLYIRYDFYTWAVDAYLWY\nHLA-B*35:39,YYATYRNIFTNTYESNLYIRYDYYTWAVLAYLWY\nHLA-B*35:41,YYATYRNIFTNTYESNLYIRYDSCTWAVLAYLWY\nHLA-B*35:42,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:43,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:44,YYATYRNIFTNTYESNLYLRYNYYTWAEWAYLWY\nHLA-B*35:45,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLSY\nHLA-B*35:46,YYAMYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:47,YYAKYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:48,YYATYRNIFTNTYESNLYIRSDSYTWAVLAYLWY\nHLA-B*35:49,YYAEYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:50,YHATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:51,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:52,YYATYRNIFTNTYECNLYIRYDSYTWAVLAYLWY\nHLA-B*35:54,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:55,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:56,YYATYRNIFTNTYENNLYIRYDFYTWAVLAYLWY\nHLA-B*35:57,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:58,YYATYRNIFTNTYESNLYLSYDSYTWAELAYLWY\nHLA-B*35:59,YYATYRNIFTNTYESNLYIRYNFYTWAVLAYLWY\nHLA-B*35:60,YYATYRNIFTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*35:61,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:62,YYATYRNIFTNTYESNLYIRYDSYTWAVWAYLWY\nHLA-B*35:63,YHTKYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:64,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:66,YYATYRNIFTNTYESNLYLSYDSYTWAVRAYEWY\nHLA-B*35:67,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:68,YYATYRNIFTNTYESNLYLRYDSYTWAVLAYLWY\nHLA-B*35:69,YYATYREIFTNTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:70,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYL
WY\nHLA-B*35:71,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLSY\nHLA-B*35:72,YYATYRNISTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:74,YYATYRNIFTNTYVSNLYIRYDFYTWAVLAYLWY\nHLA-B*35:75,YYATYRNIFTNTYESNLYLRYDFYTWAVLAYLWY\nHLA-B*35:76,YYATYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:77,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:78,YYATYRNIFANTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:79,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:80,YYATYREIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:81,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWH\nHLA-B*35:82,YYATYRNICTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:83,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:84,YHTTYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:85,YYATYRNICTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:86,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYEWY\nHLA-B*35:87,YYATYRNIFTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*35:88,YYATYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*35:89,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:90,YYTTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:91,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:92,YYATYRNIFTNAYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:93,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYTWY\nHLA-B*35:94,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:95,YYATYRNISTNTYESNLYIRYNYYTWAVLAYLWY\nHLA-B*35:96,YYATYRNIFTNTYESNLYIRYDFYTWAELAYTWH\nHLA-B*35:97,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:98,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:99,YYATYRNIFTNTYESNLYLRYDSYTWAERAYLWY\nHLA-B*35:100,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:101,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:102,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:103,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:104,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:105,YYATYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:106,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:107,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:108,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:109,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWH\nHLA-B*35:110,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:111,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:112,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:113,YYATYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:114,YYATYRNIFTNTYESNLYLSYDSYTWAVLAYLWY\nHLA-B*35:115,YYATYRNIFTNTYESNLYIRYDSYTWAVDAYLWY\nHLA-B*35:116,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:117,YYATYRNIFTNTYESNLYLSYDSYTWAEWAYLWY\nHLA-B*35:118,YYATYRNIFTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:119,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:120,YYATYRNIFTNTYESNLYIRHDSYTWAVLAYLWY\nHLA-B*35:121,YYATYRNIFTNTYESNLYIRYHSYTWAVLAYLWY\nHLA-B*35:122,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:123,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:124,YYSTYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:125,YYSTYRNIFTNTYESNLYISYDSYTWAVLAYLWY\nHLA-B*35:126,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:127,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:128,YYATYRNIFTNTYESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:131,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:132,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:133,YYATYRNIFTNTYESNLYIRYVSYTWAVLAYLWY\nHLA-B*35:135,YYATYRNICTNTYESNLYLRYDSYTWAEWAYLWY\nHLA-B*35:136,YYATYRNIFTNTDESNLYIRYDFYTWAVLAYLWY\nHLA-B*35:137,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:138,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:139,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:140,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:141,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:142,YYSTYRNIFTNTYESNLYIRYDSYTWAVRAYLWY\nHLA-B*35:143,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*35:144,YYATYRNIFTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*37:01,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:02,YHSTYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*37:04,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWH\nHLA-B
*37:05,YHSTYREISTNTYEDNLYIRSNFYTWAVDAYTWY\nHLA-B*37:06,YHSKYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:07,YHSTYREISTNTYEDTLYLSYDYYTWAERAYEWY\nHLA-B*37:08,YHSTYRNISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:09,YHSTYREISTNTYEDTLYLSYDYYTWAVDAYTWY\nHLA-B*37:10,YHSTYREISTNTYENTAYIRSNFYTWAVDAYTWY\nHLA-B*37:11,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B*37:12,YHSTYREISTNTYEDTLYIRYNYYTWAVDAYTWY\nHLA-B*37:13,YHSTYREISTNTYEDTLYIRSNFYTWAEDAYTWY\nHLA-B*37:14,YHSTYREISTNTYESNLYIRSNFYTWAVDAYTWY\nHLA-B*37:15,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:17,YHSTYREISTNTYEDTLYIRSNFYTWTVDAYTWY\nHLA-B*37:18,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:19,YHSTYREISTNTYEDTLYIRYNFYTWAVDAYTWY\nHLA-B*37:20,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:21,YHSTYREIFTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:22,YHATYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*37:23,YHSTYREISTNTYEDTLYIRSNFYTWAVDAYTWY\nHLA-B*38:01,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:02,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:03,YYSEYREISTNTDESTAYLRYNFYTWAVLTYTWY\nHLA-B*38:04,YYSEYREICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:05,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:06,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:07,YYSTYRNIFTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:08,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYMWY\nHLA-B*38:09,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:10,YYSEYRNICTNTYENIAYLRYNFYTWAELAYTWY\nHLA-B*38:11,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:12,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:13,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:14,YYSEYRNICTNTDENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:15,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:16,YYTEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:17,YYSEYRNICTNTYEDTLYLRYNFYTWAVLTYTWY\nHLA-B*38:18,YYSEYRNICTNTYENTAYLRYNFYTWAVLTYTWY\nHLA-B*38:19,YYSEYRNICTNTYENIAYLSYNFYTWAVLTYTWY\nHLA-B*38:20,YYSEYRNICTNTYENIAYIRYNFYTWAVLTYTWY\nHLA-B*38:21,YYSEYRNICTNTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*38:22,YYSEYRNICTNTYENIAYLNYNFYTWAVLTYKWY\nHLA-B*38:23,YYSEYRNICTNTYENTAYFRYNFYTWAVLTYTWY\nHLA-B*39:01,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:02,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:03,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:04,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:05,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:06,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B*39:07,YYSEYRNICTNTYESNLYLRYDSYTWAVLTYTWY\nHLA-B*39:08,YYSEYREISTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B*39:09,YYSEYRNICTNTDESNLYLRSNFYTWAVLTYTWY\nHLA-B*39:10,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:11,YYSEYRNICTNTYESNLYLRYNFYTWAVRTYTWY\nHLA-B*39:12,YDSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:13,YYSEYREISTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:14,YYSEYRNICTNTDESNLYLSYNYYTWAVLTYTWY\nHLA-B*39:15,YYSEYRNICTNTDESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:16,YYSEYRNIYTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:17,YYSEYRNIYTNTDESNLYLRYNLYTWAVLTYTWY\nHLA-B*39:18,YYSEYRNICTNTDESNLYLRYNFYTWAEWTYTWY\nHLA-B*39:19,YYSTYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:20,YYSEYRNIYTNTYENNLYLRYNFYTWAVLTYTWY\nHLA-B*39:22,YYSEYREICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:23,YYSEYREISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:24,YYSEYRNICTNTDESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:26,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:27,YYSEYRNICTNTDVSNLYLRYNFYTWAVLTYTWY\nHLA-B*39:28,YYSEYRNICTNTDESNLYLTYNFYTWAVLTYTWY\nHLA-B*39:29,YYSEYRNICTNTDESNLYLSYDYYTWAVLTYTWY\nHLA-B*39:30,YYSEYRNICTNTDESNLYLRYNFYTWAVLAYTWY\nHLA-B*39:31,YHSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:32,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYLWH\nHLA-B*39:33,YYSEYRNICTNTDESNLYWTYNFYTWAVRAYLWY\nHLA-B*39:34,YYSEYRNICTNTDESNLYWTYNFYTWAVLAYTWY\nHLA-B*39:35,YYSEYRNICTNTDESNLYLRYNFYTWAELTYTWY\nHLA-B*39
:36,YYSEYRNICTNTDESNLYLRYNFYTWAEWAYTWY\nHLA-B*39:37,YYSEYRNICTNTYESNLYLSYNFYTWAVLTYTWY\nHLA-B*39:39,YYSEYRNISTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:41,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:42,YYSEYRNICTNTDESNLYIRYNFYTWAVLTYTWY\nHLA-B*39:43,YYSEYRNICTNTDESNLYLRYDSYTWAVLAYTWH\nHLA-B*39:44,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:45,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:46,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:47,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYEWY\nHLA-B*39:48,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWH\nHLA-B*39:49,YYSEYREISTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:50,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYEWY\nHLA-B*39:51,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:52,YYSEYRNICTDTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:53,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:54,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:55,YYSEYRNICTNTYESNLYLRYDFYTWAVLTYTWY\nHLA-B*39:56,YYSEYRNICTNTYESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:57,YYSEYRNICTNTDESNLYWTYNFYTWAVLTYTWY\nHLA-B*39:58,YYSEYRNIFTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:59,YYSEYRNICTNTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*39:60,YYSEYRNICTNTDESNLYLRYNFYTWAALTYTWY\nHLA-B*40:01,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:02,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:03,YHTKYREISTNTYESNLYLSYDSYTWAVLAYEWY\nHLA-B*40:04,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:05,YHTKYREISTNTYESNLYLSYNYYTWAELAYLWY\nHLA-B*40:06,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:07,YHTKYREIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:08,YHTKYRNIFTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:09,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:10,YYAKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:11,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:12,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:13,YHTKYREIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*40:14,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:15,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*40:16,YHTKYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*40:18,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:19,YHTKYREISTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*40:20,YHTKYREISTNTYESNLYLRYDSYTWAVLAYEWY\nHLA-B*40:21,YYAMYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:23,YHTKYREISTNTYESNLYLRYNYYSWAERAYEWY\nHLA-B*40:24,YHTKYREISTNTYESNLYLSYDYYTWAVLAYEWY\nHLA-B*40:25,YHTKYRNISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:26,YHTKYREISTNTYESNLYLRYNYYTWAELAYLWY\nHLA-B*40:27,YHTKYREISTNTYESNLYLSYNNYTWAVLAYEWY\nHLA-B*40:28,YHTKYREISTNTYESNLYIRYNYYTWAELAYLWH\nHLA-B*40:29,YHTKYPEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:30,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B*40:31,YHTKYREISTNTYESNLYLSYDYYSLAVLAYEWY\nHLA-B*40:32,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*40:33,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B*40:34,YHTKYREISTNTYESNLYIVYNYYSLAVLAYEWY\nHLA-B*40:35,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:36,YHTKYREISTNTYESNLYLRYNFYSLAVLAYEWY\nHLA-B*40:37,YHTKYREISTNTYENNLYLSYNYYTWAVLAYEWY\nHLA-B*40:38,YHTKYREISTNTYESNLYLRYNSYSLAVLAYEWY\nHLA-B*40:39,YHTKYREISTNTYESNLYLSYNYYTWAVLAYTWY\nHLA-B*40:40,YYTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:42,YHTKYREISTNTYESNLYLRYDYYSLAVLAYEWY\nHLA-B*40:43,YHTKYREISTNTDESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:44,YHTKYREISTNTYESNLYWTYDYYTWAVLAYEWY\nHLA-B*40:45,YHTKYREISTNTYESNLYLSYNYYSWAVLAYEWY\nHLA-B*40:46,YHTEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:47,YHTKYREISTNTYENTAYLRYNYYSLAVLAYEWY\nHLA-B*40:48,YHTKYREISTNTYESNLYLRYNLYSLAVLAYEWY\nHLA-B*40:49,YYTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:50,YHTKYREISTNTDESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:51,YHTKYREISTNTYESNLYLRYNYYSWAELAYTWH\nHLA-B*40:52,YHTKYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*40:53,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:54,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:55
,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:56,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:57,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:58,YYAKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:59,YHTKYREISTNTYESNLYIRYDSYSLAVLAYEWY\nHLA-B*40:60,YHTKYREISTNTYESNLYLRSDSYSLAVLAYEWY\nHLA-B*40:61,YHTKYREIYTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:62,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:63,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B*40:64,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:65,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:66,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:67,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:68,YHTKYRNIFTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:69,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:70,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:71,YHTKYREISTNTYESNLYLSYNLYTWAVLAYLWY\nHLA-B*40:72,YHTKYREISTNTYESNLYLRYNYYTLAVLAYEWY\nHLA-B*40:73,YHTKYREISTNTYVSNLYLRYNYYSLAVLAYEWY\nHLA-B*40:74,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:75,YHTKYREICTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:76,YHTKYREISTNTYESKLYLRYNYYSLAVLAYEWY\nHLA-B*40:77,YHTKYREISTNTYESNLYLRYNFYTLAVLAYEWY\nHLA-B*40:78,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:79,YHTKYREISTNTYESNLHLRYNYYSLAVLAYEWY\nHLA-B*40:80,YHTKYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*40:81,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:82,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:83,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:84,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:85,YHTKYREISTNTYESNLYLSYNYYIWAVLAYEWY\nHLA-B*40:86,YHTKYREISTNTYESNLYWTYNFYTWAVLAYEWY\nHLA-B*40:87,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:88,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:89,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:90,YHTKYREISTNTYESNLYLSYNYYTWAVLAHEWY\nHLA-B*40:91,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:92,YHTKYREISTNTYESNLYLRYNYYSLAVLAYLWY\nHLA-B*40:93,YHTEYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:94,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:95,YHTKYREISTNTYESNLYWTYNYYTWAELAYEWY\nHLA-B*40:96,YHTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B*40:97,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:98,YHTKYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*40:99,YHTKYREISTNTYESNLYIRYNYYTWAVLAYEWY\nHLA-B*40:100,YHTKYREISTNTYESNLYLRFNYYSLAVLAYEWY\nHLA-B*40:101,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:102,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:103,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:104,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:105,YHTKYREISTNTYESNLYLSYNSYTWAVLAYEWY\nHLA-B*40:106,YHTKYRNIFTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:107,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:108,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:109,YHTKYREISTNTYESIAYWTYNYYTWAVLAYEWY\nHLA-B*40:110,YYTKYREISTNTYENTAYWTYNYYTWAVLAYEWY\nHLA-B*40:111,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:112,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:113,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWH\nHLA-B*40:114,YHTKYREISTNTYESNLYLRYNYYSWAVLAYEWY\nHLA-B*40:115,YHTKYWEISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:116,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:117,YHTKYREISTNTYENIAYLRYNYYSLAVLAYEWY\nHLA-B*40:119,YHTKYREISTNTYDSNLYLSYNYYTWAVLAYEWY\nHLA-B*40:120,YHTKYREISTNTYESNLYIRYDYYTWAVLAYEWY\nHLA-B*40:121,YHTKYREISTNTYESNLYLRYNYYTWAVLAYEWY\nHLA-B*40:122,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:123,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:124,YHTKYREISTNTYESNLYLRYHDYSLAVLAYEWY\nHLA-B*40:125,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:126,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:127,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:128,YHTKYREISTNTYESNLYLRYNYYSLAVRAYEWY\nHLA-B*40:129,YYTKYREISTNTYESNLYIRY
NYYTWAVLAYEWY\nHLA-B*40:130,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:131,YHTKYREISTNTYESNLYWTYNYYTWAVLAYEWY\nHLA-B*40:132,YHTKYREISTNTYESNLYLRYNYYSLAVLAYESY\nHLA-B*40:134,YHTKYREISTNIYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:135,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:136,YHTKYREISTNTYESNLYLRYNYYTWAVDAYEWY\nHLA-B*40:137,YYAMYREISTNTYESNLYIRYNYYSLAVLAYEWY\nHLA-B*40:138,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:139,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:140,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:141,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:143,YHTKYREISTNTYESNLYLSFNYYTWAVLAYEWY\nHLA-B*40:145,YHTKYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*40:146,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*40:147,YHTKYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*41:01,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:02,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:03,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B*41:04,YHTKYREISTNTYESNLYLSYDYYTWAVDAYTWY\nHLA-B*41:05,YHTKYREISTNTYESKLYWRYNYYTWAVDAYTWY\nHLA-B*41:06,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:07,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:08,YHTKYREISTNTYESNLYLRYNYYTWAVDAYTWY\nHLA-B*41:09,YHTKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*41:10,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:11,YHTKYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*41:12,YHAKYREISTNTYESNLYWRYNYYTWAVDAYTWY\nHLA-B*42:01,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:02,YHSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:04,YYSEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*42:05,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:06,YYSEYRNIYAQTDESNLYLSYNFYTWAVDAYTWY\nHLA-B*42:07,YYSEYRNIYTNTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:08,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:09,YHSEYRNIYAQTDESNLYLSYDSYTWAVDAYTWY\nHLA-B*42:10,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:11,YYSEYREISTNTYESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:12,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*42:13,YYSEYRNIYAQTDESNLYIRYNYYTWAVDAYTWY\nHLA-B*42:14,YYSEYRNIYAQTDESNLYLSYNYYTWAVDAYTWY\nHLA-B*44:02,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:03,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:04,YYTKYREISTNTYENTAYIRYDDYTWAVRAYTSY\nHLA-B*44:05,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:06,YYATYRNIFTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B*44:07,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:08,YYTMYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:09,YYTKYREISTNTYESNLYIRYDDYTWAVDAYLSY\nHLA-B*44:10,YYTKYREISTNTYENTAYIRFNLYTWAVLAYLSY\nHLA-B*44:11,YYTKYREISTNTYENTPYIRYDDYTWAVDAYLSY\nHLA-B*44:12,YYTKYRNISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:13,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:14,YYTKYREISTNTYENTAYIRYNDYTWAVDAYLSY\nHLA-B*44:15,YHTKYREISTNTYESTAYWRYNLYTWAVDAYLSY\nHLA-B*44:16,YYTKYREISTNTYENTAYIRYDDYTWAVDAYEWY\nHLA-B*44:17,YYTKYREISTNTYENTAYIRYDSYTWAVDAYLSY\nHLA-B*44:18,YHTKYREISTNTYENIAYWRYNLYTWAVDAYLSY\nHLA-B*44:20,YYTKYREISTNTYENTAYWTYDDYTWAVDAYLSY\nHLA-B*44:21,YYTKYREISTNTYENTAYIRYDDYTWAVDAYESY\nHLA-B*44:22,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:24,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:25,YYTKYREISTNTYENIAYIRYDYYTWAVDAYLSY\nHLA-B*44:26,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:27,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:28,YYTKYREISTNTYENTAYIRYDDYTWAVRAYLSY\nHLA-B*44:29,YYTKYREISTNTYENTAYIRYDDYTWAVLTYLSY\nHLA-B*44:30,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:31,YYTKYREISTNTYENTAYLRYNYYSLAVLAYESY\nHLA-B*44:32,YYTKYPEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:33,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:34,YYAKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:35,YYTKYREISTNTYENTAYIRYDDYTWAVEAYLSY\nHLA-B*44:36,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:37,YYTKYREIS
TNTYENTAYIRYDDYTWAVLAYLWY\nHLA-B*44:38,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:39,YYPKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:40,YYTEYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:41,YYTKYREISTNTYENTAYLRYDDYTWAVDAYLSY\nHLA-B*44:42,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLWY\nHLA-B*44:43,YYTKYREISTNTYENTAYIRYDSYTWAVLAYLSY\nHLA-B*44:44,YYTEYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:45,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:46,YYTKYREISTNTYESNLYIRYDDYTWAVLAYLSY\nHLA-B*44:47,YYTKYREISTNTYENTAYWTYDDYTWAVLAYLSY\nHLA-B*44:48,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:49,YDTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:50,YYTKYREISTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B*44:51,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:53,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:54,YYTKYREISTNTYENTAYLSYDDYTWAVLAYLSY\nHLA-B*44:55,YHTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:57,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:59,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:60,YYTMYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:62,YYTKYREISTNTYENTAYIRYNYYTWAVDAYLSY\nHLA-B*44:63,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:64,YYTKYREISTNTYENTAYIRYDDYTWAVLAYEWY\nHLA-B*44:65,YYTKYREISTNTYENTAYLRYDDYTWAVLAYLSY\nHLA-B*44:66,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:67,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:68,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:69,YYTKYWEISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:70,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:71,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:72,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:73,YYTKYREISTNTYENTAYIRYDDYTWAVDGYLSY\nHLA-B*44:74,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:75,YYTKYREISTNTYENNLYIRYDYYTWAVDAYLSY\nHLA-B*44:76,YYTKYREISTNTYENTAYIRYDDYTWAERAYLSY\nHLA-B*44:77,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B*44:78,YYTKYREISTNTYENTAYIRYDYYTWAVDAYLSY\nHLA-B*44:79,YYTKYREISTNTYENTAYIRYDDYTWAELAYLSY\nHLA-B*44:80,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:81,YYTNYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:82,YYTKYREISTNTYENTAYIRYNYYTWAVLAYLSY\nHLA-B*44:83,YYATYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:84,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:85,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:86,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:87,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:88,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:89,YYTKYREISTNTYENTAYIRYDDYTWAVDTYLSY\nHLA-B*44:90,YYTKYREIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-B*44:91,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLWY\nHLA-B*44:92,YYTKYREISTNTYENTAYIRYDDYTWAMLAYLSY\nHLA-B*44:93,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:94,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:95,YYTKYREISTNTYENIAYIRYDDYTWAVDAYLSY\nHLA-B*44:96,YYTKYREISTNTYENTAYIGYDDYTWAVLAYLSY\nHLA-B*44:97,YYTKYREICAKTDENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:98,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:99,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:100,YYTKYREISTNTYENTAYWRYDDYTWAVDAYLSY\nHLA-B*44:101,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:102,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:103,YHTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:104,YYTKYREISTNTYENTAYIRYDDYTWAVDAYLSY\nHLA-B*44:105,YYTKYREISTNTYENTAYIRYDDYTWAVLAYLSY\nHLA-B*44:106,YYTKYREISTNTYENTAYLSYDDYTWAVDAYLSY\nHLA-B*44:107,YYTKYREISTNTYENTAYIRYDYYTWAVLAYLSY\nHLA-B*44:109,YYTKYREISTNTYESTAYIRYDDYTWAVLAYLSY\nHLA-B*44:110,YYTKYREISTNTYENTAYISYDDYTWAVLAYLSY\nHLA-B*45:01,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:02,YHTKYREISTNTYESNLYWRYNFYTWAVDAYLSY\nHLA-B*45:03,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:04,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLWY\nHLA-B*45:05,YHTKYREISTNTYESNLYWRYNLYTWAVDVYLSY\nHLA-B*45:06,YH
TKYREIYAQTDESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:07,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:08,YHTKYREISTNTYESNLYWRYNLYTWAVDAYTWY\nHLA-B*45:09,YHTKYREISTNTYESNLYWRYDSYTWAVDAYLSY\nHLA-B*45:10,YHTKYREISTNTYESNLYWRYNLYTWAVDAYEWY\nHLA-B*45:11,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*45:12,YHTKYREISTNTYESNLYWRYNLYTWAVDAYLSY\nHLA-B*46:01,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:02,YYAMYREKYRQTGVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:03,YYAMYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-B*46:04,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:05,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:06,YYAMYREKYRQTDVSNLYLRYDSYSLAVLAYEWY\nHLA-B*46:08,YYAMYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-B*46:09,YYAMYREKYRQTDVSNLYLRYDSYTWAVWAYLWY\nHLA-B*46:10,YYTMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:11,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYLWY\nHLA-B*46:12,YYAMYREKYRQTDVSNLYLSYDSYTWAEWAYLWY\nHLA-B*46:13,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-B*46:14,YHAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:16,YYAMYREKFRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:17,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLSY\nHLA-B*46:18,YYAMYREKYRQTDVSNLYWTYNLYTWAVLAYTWY\nHLA-B*46:19,YYAMYREKYRQTDVSNLYLRYDSYTWAVLTYLWY\nHLA-B*46:20,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:21,YYAMYREKYRQTDVSNLYLRYDSYTWAVLAYTWY\nHLA-B*46:22,YYAMYREKYRRTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:23,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*46:24,YYAMYREKYRQTDVSNLYLRYDSYTWAEWAYLWY\nHLA-B*47:01,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*47:02,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B*47:03,YYTKYREISTNTYESNLYLRFHDYTWAVLAYEWY\nHLA-B*47:04,YYTKYREISTNTYENTAYLNYHDYTWAVLAYEWY\nHLA-B*47:05,YYTKYREISTNTYEDTLYLNYHDYTWAVLAYEWY\nHLA-B*47:06,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*47:07,YYTKYREISTNTYEDTLYLRFHDYTWAVLAYEWY\nHLA-B*48:01,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:02,YYSEYREISTNTYESNLYIRYDSYTWAVLAYLWY\nHLA-B*48:03,YYSEYREISTNTYESNLYLRYNYYSLAVLAYEWY\nHLA-B*48:04,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:05,YYSEYREISTNTYESNLYLSYNYYTWAERAYEWY\nHLA-B*48:06,YYSEYRNIFTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:07,YYSEYREISTNTYESNLYLSYNFYSLAVLAYEWY\nHLA-B*48:08,YYSEYREISTNTYESNLYLSYDYYTWAERAYEWY\nHLA-B*48:09,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:10,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:11,YYSEYREISTNTYESNLYLSYNYYSLAVLAYELY\nHLA-B*48:12,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:13,YYSEYREISTNTYESNLYLSYNYYTWAVLAYEWY\nHLA-B*48:14,YYSEYREISTNTYESNLYLSYNSYTLAVLAYEWY\nHLA-B*48:15,YYSEYREISTNTYESNLYLSYNYYSLAELAYEWY\nHLA-B*48:16,YYSEYRVISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:17,YYSEYREISTNTYESNLYIRYNFYSLAVLAYEWY\nHLA-B*48:18,YYSEYREISTNTYESIAYLSYNYYSLAVLAYEWY\nHLA-B*48:19,YYSEYREISTNTYESNLYLSYNYYSLAVWAYEWY\nHLA-B*48:20,YYSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:21,YYSEYREISTNTYESNLYLNYNYYSLAVLAYEWY\nHLA-B*48:22,YHSEYREISTNTYESNLYLSYNYYSLAVLAYEWY\nHLA-B*48:23,YYSEYREISTNTYESNLYLRYDSYSLAVLAYEWY\nHLA-B*49:01,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:02,YHTKYREISTNTYENTAYWRYNLYTWAELAYLWY\nHLA-B*49:03,YHATYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:04,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B*49:05,YHTKYREISTNTYENIAYWRYDSYTWAELAYLWY\nHLA-B*49:06,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:07,YHTKYREISTNTYENIAYWRYNLYTWAELAYEWY\nHLA-B*49:08,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*49:09,YHTKYREISTNTYENIAYWRYNLYTWAELAYLWH\nHLA-B*49:10,YYTKYREISTNTYENIAYWRYNLYTWAELAYLWY\nHLA-B*50:01,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:02,YHTKYREISTNTYESNLYWRYNLYTWAELAYLSY\nHLA-B*50:04,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:05,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:06,YHTRYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:07,YHTKY
REISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:08,YHTKYREISTNTYESNLYWRYNLYTWAELAYLWY\nHLA-B*50:09,YHTKYREISTNTYESNLYWRYNFYTWAELAYLWY\nHLA-B*51:01,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:02,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B*51:03,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLGH\nHLA-B*51:04,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B*51:05,YYATYRNIFTNTYENIAYWTYNYYTWAVRAYLWY\nHLA-B*51:06,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWH\nHLA-B*51:07,YYATYRNISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:08,YYATYRNIFTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B*51:09,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*51:12,YYATYRNIFTNTYENIADWTYNYYTWAELAYLWH\nHLA-B*51:13,YYATYRNIFTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B*51:14,YYATYRNIFTNTYENIAYWTYKYYTWAELAYLWH\nHLA-B*51:15,YYATYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B*51:16,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B*51:17,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:18,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:19,YYATYRNIFTNTYENIAYWTYNYYTWAVLTYLWH\nHLA-B*51:20,YYATYRNIFTNTDENIAYWTYNYYTWAVDAYLWH\nHLA-B*51:21,YYATYRNIFTNTYENIAYWTYNYYTWAELAYTWH\nHLA-B*51:22,YYATYRNICTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:23,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLSY\nHLA-B*51:24,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:26,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:28,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:29,YYATYRNIFTNTYENIAYWTYNYYTWAERAYLWH\nHLA-B*51:30,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:31,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWH\nHLA-B*51:32,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:33,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:34,YYATYRNIFTNTYENIAYWTYNYYTWAELAYEWY\nHLA-B*51:35,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:36,YYATYRNIFTNTYENIAYWTYNYYTWAEDAYTWY\nHLA-B*51:37,YYATYRNIFTNTYENIAYWTYDSYTWAELAYLWH\nHLA-B*51:38,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:39,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:40,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B*51:42,YYATYRNIFTNTYENIAYIRYDDYTWAVLAYLSY\nHLA-B*51:43,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:45,YYATYRNIFTNTYENIAYLRYDSYTWAELAYLWH\nHLA-B*51:46,YYATYRNIFTNTYENIAYITYNYYTWAELAYLWH\nHLA-B*51:48,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:49,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:50,YYATYRNIFTNTYENGLYWTYNYYTWAELAYLWH\nHLA-B*51:51,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:52,YYATYRNIFTNTHENIAYWTYNYYTWAELAYLWH\nHLA-B*51:53,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:54,YYATYRNIFTNTYENTAYWTYNYYTWAVRAYLWY\nHLA-B*51:55,YYATYRNIFTNTYENIAYWTYNYYTWAEQAYLWH\nHLA-B*51:56,YYATYRNIFTNTYENIAYIRYNYYTWAELAYLWH\nHLA-B*51:57,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:58,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:59,YYATYRNIFTNTYENIAYLRYNYYTWAELAYLWY\nHLA-B*51:60,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:61,YYATYRNIFTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B*51:62,YYATYRNIFTNTYENIAYLRYNLYTWAELAYLWH\nHLA-B*51:63,YYATYRNIFTNTYENIAYWTYNSYTWAELAYLWH\nHLA-B*51:64,YYATYRNIFTNTYENIAYLSYNYYTWAELAYLWH\nHLA-B*51:65,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:66,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:67,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:68,YDATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:69,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:70,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:71,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:72,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:73,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*51:74,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:75,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:76,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:77,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:78,YYATYRNIFTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B*51:79,YYATYRNI
FTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:80,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:81,YYATYRNIFTNTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*51:82,YYATYRNIFTNTYENIAYWTYNYYTWAERAYEWH\nHLA-B*51:83,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:84,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:85,YYATYRNIFTNTYENIAYWTYHDYTWAELAYLWH\nHLA-B*51:86,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:87,YYATYRNIFTNTYENIAYWTYNYYTWADLAYLWH\nHLA-B*51:88,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:89,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:90,YYATYRNIFTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B*51:91,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:92,YYATYRNIFTNTYENIAYWTYDFYTWAELAYLWH\nHLA-B*51:93,YYATYRNIFTNTYENIAYWTYNYYTWAVLAYEWY\nHLA-B*51:94,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:95,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*51:96,YYATYRNIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:01,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:02,YYAMYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:03,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWY\nHLA-B*52:04,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:05,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:06,YYATYREIFTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:07,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:08,YYATYREISTNTYENIAYWTYNYYTWAELAYEWH\nHLA-B*52:09,YYATYREISTNTYESIAYWTYNYYTWAELAYLWH\nHLA-B*52:10,YYATYREISTNTYENIAYWTYNYYTWAVLAYLWH\nHLA-B*52:11,YYATYREISTNTYENIAYWTYNYYTWAELAYLWY\nHLA-B*52:12,YYATYREISTNTYENIAYWTYDYYTWAELAYLWH\nHLA-B*52:13,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:14,YYATYREISTNTYENIAYWTYNFYTWAELAYLWH\nHLA-B*52:15,YYATYREISTNTYENIAYWTYNYYTWAELAYLSH\nHLA-B*52:16,YYSEYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:17,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:18,YYATYREISTNTYENIAYWTYNYYTWAELAYLWH\nHLA-B*52:19,YYATYREISTNTYENIAYWTYNYYTWAVDAYLWH\nHLA-B*52:20,YYATYREISTNTYENTAYWTYNYYTWAELAYLWH\nHLA-B*52:21,YYATYREISTNTYENIAYWTYNYYTWAEWAYLWH\nHLA-B*53:01,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:02,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWH\nHLA-B*53:03,YYATYRNIFTNTYEDTLYIRYDSYTWAVLAYLWY\nHLA-B*53:04,YYATYRNIFTNTYENIAYIRYDFYTWAVLAYLWY\nHLA-B*53:05,YYATYRNIFTNTYESIAYIRYDSYTWAVLAYLWY\nHLA-B*53:06,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWH\nHLA-B*53:07,YYATYRNIFTNTYENIAYIRSNFYTWAVLAYLWY\nHLA-B*53:08,YYATYRNIFTNTYENIAYIRYDSYTWAELAYLWY\nHLA-B*53:09,YYATYRNISTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B*53:10,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:11,YYATYRNIFTNTYENTAYIRYDSYTWAVRAYLWY\nHLA-B*53:12,YYATYRNIFTNTYESTAYIRYDSYTWAVLAYLWY\nHLA-B*53:13,YYATYRNIFTNTYENTAYIRYDSYTWAVLAYLWY\nHLA-B*53:14,YYATYRNIFTNTYENIAYLSYDSYTWAVLAYLWY\nHLA-B*53:15,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:16,YYATYRNIFTNTYESIAYIRYDSYTWAVRAYLWY\nHLA-B*53:17,YYATYREISTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:18,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:19,YYATYRNIFTNTYENIAYIRYNYYTWAVLAYLWY\nHLA-B*53:20,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:21,YYATYRNIFTNTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*53:22,YYATYRNIFTNTYENIAYIRYDSYTWAVDAYLSY\nHLA-B*53:23,YYATYRNIFTNTDENIAYIRYDSYTWAVLAYLWY\nHLA-B*54:01,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:02,YHAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:03,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*54:04,YYAGYRNIYAQTDESNLYWTYNYYTWAVLAYTWY\nHLA-B*54:06,YYAGYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*54:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:09,YYAGYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B*54:10,YYAGYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B*54:11,YYAGYRNIYAQTDESNLYWTYNYYSWAVLAYTWY\nHLA-B*54:12,YYAGYRNIYAQTDENIAYWTYNLYTWAVLAYTWY\nHLA-B*54:13,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:14,YYAGYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B*54:15,YYAGYRNIYAQ
TDESNLYWTYDYYTWAVLAYTWY\nHLA-B*54:16,YYAGYRNIYAQTDESNLYWTYDLYTWAVLAYTWY\nHLA-B*54:17,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:18,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:19,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:20,YYAGYRNIYAQTDESNLYWTYNLYTWAERAYTWY\nHLA-B*54:21,YYSGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*54:22,YYAGYRNIYAQTDESNLYWTYNLYSWAVLAYTWY\nHLA-B*54:23,YYAGYRNIYAQTEESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:01,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:02,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:03,YYAEYRNIYAQTDVSNLYWTYNLYTWAELAYTWY\nHLA-B*55:04,YYAEYRNIYAQTDESNLYLSYNYYTWAVLAYTWY\nHLA-B*55:05,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:07,YYAGYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:08,YYAEYRNIYAQTDESNLYLRYNYYTWAVLAYLWY\nHLA-B*55:09,YYAEYRNIYAQTDESNLYWTYNLYTWAERAYEWY\nHLA-B*55:10,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:11,YYAEYRNIYAQTDESNLYWMYNLYTWAELAYTWY\nHLA-B*55:12,YYAEYRNIYAQTDENNLYWTYNLYTWAVLAYTWY\nHLA-B*55:13,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYTWY\nHLA-B*55:14,YYAEYRNIYAQTDESNLYIVYDSYTWAELAYTWY\nHLA-B*55:15,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:16,YHAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:17,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B*55:18,YYAEYREISTNTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:19,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:20,YYAEYRNIYAQTDESNLYWTYNYYTWAVDAYTWY\nHLA-B*55:21,YYAEYRNIYAQTDESNLYWTYNLYTWAEWAYTWY\nHLA-B*55:22,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYEWY\nHLA-B*55:23,YYAEYRNIYAQTDESNLYWTYDSYTWAVLAYTWY\nHLA-B*55:24,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYEWY\nHLA-B*55:25,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:27,YYAEYRNIYAQTDESNLYWTYDYYTWAVLAYTWY\nHLA-B*55:28,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYTWY\nHLA-B*55:29,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:30,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:31,YYAEYRNIYAQTYESNLYWTYNLYTWAELAYTWY\nHLA-B*55:32,YYAEYRNIYAQTDESNLYWTYNSYTWAVLAYTWY\nHLA-B*55:33,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:34,YYAEYREISAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:35,YYAMYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:36,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:37,YYAEYRNIYAQTDESNLYWTYNLYTWAVRAYTWY\nHLA-B*55:38,YYAEYRNIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:39,YYAEYRNIYAQTDESNLYWTYNLHTWAVLAYTWY\nHLA-B*55:40,YYAEYREIYAQTDESNLYWTYNLYTWAELAYTWY\nHLA-B*55:41,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:42,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*55:43,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYTWY\nHLA-B*56:01,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:02,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B*56:03,YYAEYRNIYAQTDESNLYLRYDSYTWAEWAYLWY\nHLA-B*56:04,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYLWY\nHLA-B*56:05,YYAEYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B*56:06,YYATYRNIYAQTDESNLYWTYNYYTWAELAYLWH\nHLA-B*56:07,YYAEYRNIYAQTDENTAYWTYNLYTWAVLAYLWY\nHLA-B*56:08,YYAEYREKYGQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:09,YYAEYRNIYAQTDESNLYIRYDSYTWAVLAYLWY\nHLA-B*56:10,YYAEYRNIYAQTDESNLYLRYNLYTWAVLAYTWY\nHLA-B*56:11,YYAEYRNIYAQTDESNLYIRYDFYTWAVLAYLWY\nHLA-B*56:12,YYAEYRNIYAQTDESNLYIRYNYYTWAVLAYTWY\nHLA-B*56:13,YYAEYRNIYAQTDESNLYWTYNLYTWAVDAYLWY\nHLA-B*56:14,YYAEYREKYRQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:15,YYAEYRNIYAQTDESNLYWTYNYYTWAVLAYLWY\nHLA-B*56:16,YYSEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:17,YYAEYRNIYANTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:18,YYAEYRNIYAQTDESNLYLRYDSYTWAVLAYTWY\nHLA-B*56:20,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:21,YYAEYRNIYAQTDENIAYWTYNYYTWAELAYLWH\nHLA-B*56:22,YYAEYRNIYAQTDESNLYWTYNFYTWAVLAYLWY\nHLA-B*56:23,YYAEYRNIYANTYESNLYWTYNLYTWAVLAYTWY\nHLA-B*56:24,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:25,YYAEYRNIYAQTDE
SNLYWTYNLYTWAELAYLWY\nHLA-B*56:26,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:27,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*56:29,YYAEYRNIYAQTDESNLYWTYNLYTWAVLAYLWY\nHLA-B*57:01,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:02,YYAMYGENMASTYENIAYIVYNYYTWAVRAYLWY\nHLA-B*57:03,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B*57:04,YYAMYGENMASTYENIAYIVYDDYTWAVRAYLWY\nHLA-B*57:05,YYAMYGENMASTYENIAYIRYNYYTWAVRAYLWY\nHLA-B*57:06,YYAMYGENMASTYENIAYIVYDSYIWAVLAYLWY\nHLA-B*57:07,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLSY\nHLA-B*57:08,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:09,YYAMYGENMASTYENIAYIVYNYYTWAEDAYLWY\nHLA-B*57:10,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:11,YYAMYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*57:12,YYAMYGENMASTYESNLYIVYNYYTWAVRAYLWY\nHLA-B*57:13,YYAMYGENMASTYENIAYIVYDSYTWAERAYEWY\nHLA-B*57:14,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWH\nHLA-B*57:15,YYAMYGENVASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:16,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:17,YYAMYGENMASTYENIAYIVYNYYTWAVLAYLWY\nHLA-B*57:18,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:19,YYAMYGENMASTYENIAYIVYDSYTWAVRAYLWY\nHLA-B*57:20,YYAMYGKNMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:21,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:22,YYAMYGENMASTYENIAYIVYDSYTWAELAYLWY\nHLA-B*57:23,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:24,YYAMYGENMASTYENIAYIVYDSYTWAVDAYLWY\nHLA-B*57:25,YYAMYGENMASTYENIAYIVYDSYTWAVLAYEWY\nHLA-B*57:26,YYAMYGENMASTYENIAYIVYDSYTWAVLAYTWY\nHLA-B*57:27,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:29,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:30,YYAMYGENMASTYENIAYIVYDSYTWAARAYLWY\nHLA-B*57:31,YYAMYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*57:32,YYAMYGENMASTYENIAYIVYHDYTWAVLAYLWY\nHLA-B*58:01,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:02,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*58:04,YYATYEENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:05,YYATYGENMASTYENIAYIRYDSYTLAALAYTWY\nHLA-B*58:06,YYATYGENMASTYENIAYLWYDSYTWAELAYLWY\nHLA-B*58:07,YYATYGENMASTYENIAYLWYDSYTWAVLAYLSY\nHLA-B*58:08,YYATYGENMASTYENIAYWTYNYYTWAELAYLWH\nHLA-B*58:09,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWH\nHLA-B*58:11,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:12,YYSTYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:13,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:14,YYATYGENMASTYENIAYIVYDSYTWAVLAYLWY\nHLA-B*58:15,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:16,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B*58:18,YYATYGENMASTYENIAYLSYDSYTWAVLAYLWY\nHLA-B*58:19,YYATYGENMASTYENIAYIRYDSYTWAELAYLWY\nHLA-B*58:20,YYATYGENMASTYENIAYLRYNFYTWAVLTYTWY\nHLA-B*58:21,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:22,YYATYGENMASTYENIAYIRYDSYTWAVRAYLWY\nHLA-B*58:23,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:24,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:25,YYATYGENMASTYENIAYLWYDSYTWAVLAYLWY\nHLA-B*58:26,YYATYGENMASTYENIAYLRYDSYTWAVLAYLWY\nHLA-B*58:27,YYATYGENMASTYENIAYLSYNYYTWAVLAYEWY\nHLA-B*58:28,YYATYGENMASTYENIAYIRYNYYTWAVLAYLWY\nHLA-B*58:29,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*58:30,YYATYGENMASTYENIAYIRYDSYTWAVLAYLWY\nHLA-B*59:01,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B*59:02,YYAEYRNIFTNTYENIAYWTYNYYTWAVLAYTWY\nHLA-B*59:03,YYAEYRNIFTNTYENIAYWTYNFYTWAVLAYTWY\nHLA-B*59:04,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYLWY\nHLA-B*59:05,YYAEYRNIFTNTYENIAYWTYNLYTWAVLAYTWY\nHLA-B*67:01,YYSEYRNIYAQTDESNLYLRYNFYTWAVLTYTWY\nHLA-B*67:02,YYSGYREKYRQADVSNLYLRYNFYTWAVLTYTWY\nHLA-B*73:01,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B*73:02,YHTEYRNICAKTDVGNLYWTYNFYTWAVLAYEWH\nHLA-B*78:01,YYATYRNIFTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B*78:02,YYATYRNIFTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B*78:03,YYATYRNICTNTDESNLYWTYNYYTWAELAYLWH\nHLA-B*78:04,YYATYRNIFTNTYESNL
YWTYNYYTWAVLAYLWY\nHLA-B*78:05,YYATYREISTNTYESNLYWTYNYYTWAELAYLWH\nHLA-B*78:06,YYATYREISTNTYENNLYWTYNYYTWAELAYLWH\nHLA-B*78:07,YYATYRNIFTNTDESNLYWTYNYYTWAELAYTWH\nHLA-B*81:01,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:02,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:03,YYSEYRNIYAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*81:05,YYSEYRNIFAQTDESNLYLSYNYYSLAVLAYEWY\nHLA-B*82:01,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B*82:02,YYSEYRNIYAQTDESNLYLRFNLYTWAVDAYLSY\nHLA-B*82:03,YYSEYRNIYAQTDESNLYLRYNLYTWAVDAYLSY\nHLA-B*83:01,YYSEYRNIYAQTDESNLYIRYDDYTWAVDAYLSY\nHLA-C*01:02,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:03,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*01:04,YFSGYREKYRQTDVSNLYLWCDSYTWAEWAYTWY\nHLA-C*01:05,YFSGYREKYRQTDVSNLYLRSDYYTWAERAYTWY\nHLA-C*01:06,YFSGYREKYRQTDVSNLYLWCDYYTWAVRAYTWY\nHLA-C*01:07,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:08,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:09,YFSGYREKYRQTDVSNLYLWCDYYTWAEWAYTWY\nHLA-C*01:10,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYEWY\nHLA-C*01:11,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:12,YFSGYREKYRQTDVSNLYLWYDYYTWAERAYTWY\nHLA-C*01:13,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:14,YFSGYREKYRQTDVNKLYLWCDYYTWAERAYTWY\nHLA-C*01:15,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:16,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:17,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:18,YFSGYREKYHQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:19,YFSGYREKYRQTDVCNLYLWCDYYTWAERAYTWY\nHLA-C*01:20,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:21,YFSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*01:22,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYTWY\nHLA-C*01:23,YFSGYREKYRQADVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:24,YFSGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*01:25,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:26,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:27,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:28,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:29,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTCY\nHLA-C*01:30,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWH\nHLA-C*01:31,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYMWY\nHLA-C*01:32,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:33,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:34,YFSGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C*01:35,YFSGYREKYRQTDVSNLYLWCDYYTWAELAYLWY\nHLA-C*01:36,YFSGYREKYRQTDVSNLYLRFDYYTWAERAYTWY\nHLA-C*01:38,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:39,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*01:40,YFSGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*02:02,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:03,YYAGYREKYRQTDVNKLYLRYDSYTWAVLAYEWY\nHLA-C*02:04,CYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYEWY\nHLA-C*02:06,YYAGYREKYRQTDVNKLYLRYDLYTWAEWAYEWY\nHLA-C*02:07,YYAGYREKYRQTDVNKLYLRYHDYTWAEWAYEWY\nHLA-C*02:08,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:09,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:10,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:11,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:12,YYAGYREKYRQADVSKLYLRYDSYTWAEWAYEWY\nHLA-C*02:13,YYAGYREKYRQTDVNKLYLRYDSYTWAAWAYEWY\nHLA-C*02:14,YDAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:15,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:16,YYAGYREKYRQTDVNKLYLRYDSYTWAELAYEWY\nHLA-C*02:17,YYAGYREKYRQTDVNKLYLWFDSYTWAEWAYEWY\nHLA-C*02:18,YYAGYREKYRQTDVNKLYLRYDSYTWAALAYEWY\nHLA-C*02:19,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C*02:20,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:22,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYEWY\nHLA-C*02:23,YYAGYREKYRQTDVNKLYLRYDYYTWAEWAYEWY\nHLA-C*02:24,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:26,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:27,YYAGYREKYRQTDVSNLYLR
YDSYTWAEWAYEWY\nHLA-C*02:28,YYAGYREKYRQTDVNKLYLRYDSYTWAVWAYEWY\nHLA-C*02:29,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:30,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:31,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:32,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYEWY\nHLA-C*02:33,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:34,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:35,YYAGYREKYRQTDVNKLHLRYDSYTWAEWAYEWY\nHLA-C*02:36,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:37,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:39,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*02:40,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYEWY\nHLA-C*03:01,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:02,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:03,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:04,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:05,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:06,YYAGYREKYRQTDVSNLYIRYVYYTWAELAYLWY\nHLA-C*03:07,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C*03:08,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:09,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:10,YYAGYREKYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C*03:11,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:12,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C*03:13,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C*03:14,YYAGYREKYRQTDVSNLYIRYDSYTLAALAYTWY\nHLA-C*03:15,YYAGYREKYRQADVNKLYLRYDSYTWAELAYLWY\nHLA-C*03:16,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYTWY\nHLA-C*03:17,YYAGYREKYRQTDVSNLYLWYDYYTWAELAYLWY\nHLA-C*03:18,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:19,YYAGYREKYRQTDVSNLYIRYDLYTWAELAYLWY\nHLA-C*03:21,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C*03:23,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:24,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:25,YYAGYREKYRQTDVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:26,YYAGYREKYRQTDVSNLYIRYDFYTWAELAYLWY\nHLA-C*03:27,YYAGYREKYRQADVSNLYLSYDYYTWAELAYLWY\nHLA-C*03:28,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C*03:29,YYAGYRENYRQTDVSKLYIRYDYYTWAELAYLWY\nHLA-C*03:30,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:31,YYAGYRENYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:32,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:33,YYAGYREKYRQTDVSNLCLRYDSYTWAELAYLWY\nHLA-C*03:34,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYEWY\nHLA-C*03:35,YYAGYREKYRQTDVSNLYLRYDYYTWAELAYLWY\nHLA-C*03:36,YYAGYREKYRQTDVSNLYLRYDSYTWAVLAYLWY\nHLA-C*03:37,YYSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:38,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:39,YDSGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:40,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:41,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:42,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:43,YYAGYREKYRQTDVSNLYIRYDSYTWAELAYLWY\nHLA-C*03:44,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:45,YYAGYREKYRQTDVNKLYIRYDYYTWAELAYLWY\nHLA-C*03:46,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:47,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:48,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:49,YYAGYREKYRQTDVSNLYIRYDYYTWAERAYLWY\nHLA-C*03:50,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:51,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWH\nHLA-C*03:52,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:53,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:54,YYAGYREKYRQTDVSNLYIRYDYYTWAELPYLWY\nHLA-C*03:55,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C*03:56,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:57,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:58,YYAGYREKYRQTDVSNLYLWCNFYTWAERAYTWY\nHLA-C*03:59,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:60,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:61,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:62,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:63,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:64,YYAGYREKYRQTDVSNLYIRYDY
YTWAELAYLWY\nHLA-C*03:65,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:66,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:67,YDAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:68,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:69,YYAGYREKYRQADVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:70,YYAGYREKYRQTDESNLYIRYDYYTWAELAYLWY\nHLA-C*03:71,YYAGYREKYRQTDVSNLYLWYDSYTWAELAYLWY\nHLA-C*03:72,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:73,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:74,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:75,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:76,HYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:77,YYAGYREKYRQTDVSNLYIRYDYYTWAVLAYLWY\nHLA-C*03:78,YYAGYREKYRQTDVSNLYIRYDYYTWAEMAYLWY\nHLA-C*03:79,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:80,YYAGYREKYRQTDVSNLYIRYDYYTWAEWAYTWY\nHLA-C*03:81,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:82,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:83,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:84,YYAGYREKYRQTDVSNLYLRYDSYTWAELAYLWY\nHLA-C*03:85,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:86,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*03:87,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:88,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:89,YYAGYREKYRQTDVSNLYLRFDSYTWAELAYLWY\nHLA-C*03:90,YYAGYREKYRQTDVSNLYIRSDYYTWAELAYLWY\nHLA-C*03:91,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:92,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYTWY\nHLA-C*03:93,YYAGYREKYRQTDVSNLYIRYDYYTWAELAYLWY\nHLA-C*03:94,YYAGYREKYRQTDVSNLYLWCDYYTWAERAYTWY\nHLA-C*04:01,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:03,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:04,YSAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C*04:05,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:06,YYAGYREKYRQADVNKLYLRFNFYTWAELAYTWY\nHLA-C*04:07,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:08,YSAGYREKYRQADVNKLYLRFNFYTWAERAYLWY\nHLA-C*04:10,YSAGYREKYRQTDVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:11,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:12,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:13,YSAGYREKYRQADVNKLYLRFNFYTWAALAYTWY\nHLA-C*04:14,YSAGYREKYRQADVNKLYLRFNFYTWAEQAYTWY\nHLA-C*04:15,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*04:16,YYAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:17,YSAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*04:18,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:19,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:20,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:23,YSAGYREKYRQADVNKLYLRFDFYTWAERAYTWY\nHLA-C*04:24,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:25,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:26,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:27,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:28,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:29,YSAGYREKYRQADVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:30,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:31,YSAGYREKYRQADVNKLYLRFNFYTWVERAYTWY\nHLA-C*04:32,YSAGYREKYRQADVNKLYLRFNFYTWAERAYEWY\nHLA-C*04:33,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:34,YSAGYREKYRQADVNKLYLRFNFYTWAVLAYLWY\nHLA-C*04:35,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:36,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:37,YSAGYREKYRQADVNKLYLWCNFYTWAERAYTWY\nHLA-C*04:38,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:39,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:40,YSAGYREKYRQADVNKLYFRFNFYTWAERAYTWY\nHLA-C*04:41,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:42,YDAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:43,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:44,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:45,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:46,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:47,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:48,YSAGYREKYRQADVNKLYLRFNFYTW
AERPYTWY\nHLA-C*04:49,YSAGYWEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:50,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:51,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:52,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:53,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:54,YSAGYREKYRQADVNKLYLRFDSYTWAERAYTWY\nHLA-C*04:55,YSAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*04:56,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:57,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:58,YSAGYREKYRQADVNKLYLRFNFYTLAALAYTWY\nHLA-C*04:60,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:61,YSAGYREKYRQADVNKLYLRFNFYTWAARAYTWY\nHLA-C*04:62,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:63,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:64,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:65,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:66,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:67,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:68,YSAGYREKYRQADVNKLYLRFNFYTWAAQAYTWY\nHLA-C*04:69,YSAGYGEKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*04:70,YSAGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*05:01,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:03,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:04,YYAGYREKYRQTDVNKLYLRYDSYTWAERAYTWY\nHLA-C*05:05,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:06,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:08,YYAGYREKYRQTDVNKLYLRYNFYTWAEWAYTWY\nHLA-C*05:09,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:10,YYAGYREKYRQTDVNKLYIRYNFYTWAERAYTWY\nHLA-C*05:11,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYTWY\nHLA-C*05:12,YYAGYREKYRQTDVNKLYLRYNFYTWAVRAYTWY\nHLA-C*05:13,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:14,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:15,YYAGYWEKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:16,YYAGYREKYRQTDVNKLYLWYNFYTWAERAYTWY\nHLA-C*05:17,YYAGYREKYRQTDVNKLYLRYNFYTWAALAYTWY\nHLA-C*05:18,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:19,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:20,YYAGYREKYRQTDVNNLYLRYNFYTWAERAYTWY\nHLA-C*05:21,YYAGYREKYRQTDVNKLHLRYNFYTWAERAYTWY\nHLA-C*05:22,YYAGYREKYRQTDVNKLYLRYDFYTWAERAYTWY\nHLA-C*05:23,YYAGYREKYRQTDVNKLYLRYNFYTLAERAYTWY\nHLA-C*05:24,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:25,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:26,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:27,YYAGYREKYRQTDVNKLYLRYNFYTWAELAYLWY\nHLA-C*05:28,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:29,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:30,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:31,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:32,YYAGYREKYRQTDVNRLYLRYNFYTWAERAYTWY\nHLA-C*05:33,YYAGCREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:34,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:35,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:36,YYAGYRENYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:37,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:38,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:39,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYLWY\nHLA-C*05:40,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:41,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:42,YYAGYREKYRQADVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:43,YDAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:44,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*05:45,YYAGYREKYRQTDVNKLYLRYNFYTWAERAYTWY\nHLA-C*06:02,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:03,YYSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:04,YDSGYREKYRQADVNKLYLWYDSYTWAELAYTWY\nHLA-C*06:05,YDSGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C*06:06,YDSGYREKYRQADVNKLYLWYDSYTWAERAYTWY\nHLA-C*06:07,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:08,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYEWY\nHLA-C*06:09,YDSGYREKYRQADVNKLYLWYNFYTWAEWAYTWY\nHLA-C*06:10,YDPGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:11,YDSGYREKYRQADVSNLYLWYDSYTWAEW
AYTWY\nHLA-C*06:12,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:13,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:14,YDSGYREKYRQADVNKLYIWYDSYTWAEWAYTWY\nHLA-C*06:15,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:17,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:18,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:19,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:20,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:21,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:22,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:23,YDSGYREKYRQADVNKLYLWCDSYTWAEWAYTWY\nHLA-C*06:24,YDSGYREKYRQADVNKLYLWYDSYTWAEWAHTWY\nHLA-C*06:25,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:26,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:27,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:28,YDAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:29,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:30,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:31,YDSGYREKYRQADVNKLYLWYDSYTWAAWAYTWY\nHLA-C*06:32,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:33,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:34,YDSGYREKYRQADVNKLYLWYDFYTWAEWAYTWY\nHLA-C*06:35,YDSGYREKYRQADVNKLYIRSDSYTWAEWAYTWY\nHLA-C*06:36,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:37,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:38,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:39,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:40,YDSGYREKYRQADVNKLYLWYDSYTWAEWTYTWY\nHLA-C*06:41,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:42,YDSGYREKYRQADVNKLYLWYDSYTRAEWAYTWY\nHLA-C*06:43,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:44,YDSGYRENYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*06:45,YDSGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*07:01,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:02,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:03,YDSGYREKYRQADVSNLYLRSDSYTWAALAYLWY\nHLA-C*07:04,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:05,YDSGYREKYRQADVSNLYLNYDSYTLAALAYTWY\nHLA-C*07:06,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:07,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C*07:08,YDSGYREKYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C*07:09,YDSGYRENYRQADVNKLYLRYDSYTLAALAYTWY\nHLA-C*07:10,YDSGYREKYRQADVSNLYIRSDSYTLAALAYTWY\nHLA-C*07:11,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:12,YDSGYREKYRQADVSNLYFRYDFYTWAADAYTWY\nHLA-C*07:13,YDSGYREKYRQADVSNLYLRSDFYTLAALAYTWY\nHLA-C*07:14,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:15,YDSGYREKYRQADVSNLYLRSDSYTLAALAYEWY\nHLA-C*07:16,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:17,YDSGYREKYRQADVSNLYLRSDSYTWAALAYTWY\nHLA-C*07:18,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:19,YDSGYRENYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:20,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:21,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:22,YDSGYRENYRQADVSNLYLRYDSYTLAAWAYTWY\nHLA-C*07:23,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:24,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:25,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:26,YYSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:27,YDSGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:28,YDSGYRENYRQADVSNLYLRYNFYTLAALAYTWY\nHLA-C*07:29,YDSGYREKYRQADVSNLYLRSDYYTLAALAYTWY\nHLA-C*07:30,YDSGYRENYRQADVSNLYLRYDSYTLAGLAYTWY\nHLA-C*07:31,YDSGYREKYRQADVSNLYLWYDSYTLAALAYTWY\nHLA-C*07:35,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:36,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:37,YDSGYREKYRQADVSNLYLRSDSYTLAARAYTWY\nHLA-C*07:38,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:39,YDSGYREKYRQTDVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:40,YDSGYRENYRQTDVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:41,YDSGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*07:42,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:43,YDSGYREKYRQADVSNLYIRYDSYTLAALAYTWY\nHLA-C*07:44,YDSGYRENYRQADVSNLYLRYDSYTLAALAYT
WY\nHLA-C*07:45,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:46,YDSEYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:47,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:48,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:49,YDSGYREKYRQADVNNLYLRSDSYTLAALAYTWY\nHLA-C*07:50,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:51,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:52,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:53,YDSGYRENYRQADVSNLYLRYDSYTLAAQAYTWY\nHLA-C*07:54,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:56,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:57,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:58,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:59,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:60,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:62,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:63,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:64,YSAGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:65,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:66,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:67,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:68,YDSGYREKYRQADVSNLYLRSDSYTLAADAYTWY\nHLA-C*07:69,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:70,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:71,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:72,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:73,YDAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:74,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:75,YDSGYREKYRQADVSNLHLRSDSYTLAALAYTWY\nHLA-C*07:76,YDSGYREKYRQADVNKLYLRSDSYTLAALAYTWY\nHLA-C*07:77,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:78,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:79,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:80,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:81,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:82,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:83,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:84,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:85,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:86,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:87,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:88,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:89,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:90,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:91,YDSGYRENYRQADVSNLYLRYDSYTLTALAYTWY\nHLA-C*07:92,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:93,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:94,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:95,YDSGYRENYRQADVSNLYLRYDSYTLAVLAYTWY\nHLA-C*07:96,YYAGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:97,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:99,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:100,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:101,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:102,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:103,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:105,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:106,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:107,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:108,YDSGYRENYRQADVSNLYLRFDSYTLAALAYTWY\nHLA-C*07:109,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:110,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:111,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:112,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:113,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:114,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:115,YDSGYRENYRQADVSDLYLRYDSYTLAALAYTWY\nHLA-C*07:116,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:117,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:118,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:119,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:120,YDSGYRENYRQADVSNLYLRYDSYTLAALAYPWY\nHLA-C*07:122,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:123,YDSGYREKYRQAD
VSNLYLRSDSYTLAALAYTWY\nHLA-C*07:124,YDSGYRENYRQADESNLYLRYDSYTLAALAYTWY\nHLA-C*07:125,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:126,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:127,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:128,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:129,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:130,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:131,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:132,DDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:133,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:134,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:135,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:136,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:137,YDSGYREKYRQADVSNLYLRSDSYTLAALTYTWY\nHLA-C*07:138,YDSGYREKYRQADVSNLYLRSDSYTLAAWAYTWY\nHLA-C*07:139,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:140,YDSGYRENYRQADVSNLYLRYDSYTWAVDAYTWY\nHLA-C*07:141,YDSGYRENYRQADVSNLYLRYDSYTWAALAYTWY\nHLA-C*07:142,YDSGYREKYRQADVSNLYFRYDFYTLAADAYTWY\nHLA-C*07:143,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:144,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:145,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:146,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:147,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*07:148,YDSGYRENYRQADVSNLYLRYDSYTLAALAYTWY\nHLA-C*07:149,YDSGYREKYRQADVSNLYLRSDSYTLAALAYTWY\nHLA-C*08:01,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:02,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:03,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:04,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C*08:05,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:06,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYAWY\nHLA-C*08:07,YYAGYREKYRQTDVSNLYLRYNFYTLAERAYTWY\nHLA-C*08:08,YYAGYREKYRQTDVSNLYLSYNFYTWATLAYTWY\nHLA-C*08:09,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C*08:10,YYAGYREKYRQTDVNKLYLRYNFYTWATLAYTWY\nHLA-C*08:11,YYAGYREKYRQTDVSNLYLRYDSYTWATLAYTWY\nHLA-C*08:12,YYAGYREKYRQTDVSNLYLWYNFYTWAERAYTWY\nHLA-C*08:13,YYAGYREKYRQTDVSNLYLRYNFYTWAELAYTWY\nHLA-C*08:14,YYSGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:15,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:16,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:17,YYAGYREKYCQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:18,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:19,YYAGYREKYRQTDVSNLYLRFNFYTWAERAYTWY\nHLA-C*08:20,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:21,YYAGYREKYRQADVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:22,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:23,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:24,YYAGYREKYRQTDVSNLYLRYNFYTWATLAYTWY\nHLA-C*08:25,YYAGYREKYRQADVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:27,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C*08:28,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:29,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYTWY\nHLA-C*08:30,YYAGYREKYRQTDVSNLYLRYNFYTWPERAYTWY\nHLA-C*08:31,YYAGYREKYRQTDVSNLYLRYNFYTWAEWAYEWY\nHLA-C*08:32,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:33,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:34,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*08:35,YYAGYREKYRQTDVSNLYLRYNFYTWAERAYTWY\nHLA-C*12:02,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:03,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:04,YYAGYREKYRQADVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:05,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:06,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:07,YYAGYREKYRQADVGNLYLWYDSYTWAEWAYTWY\nHLA-C*12:08,YYAGYRENYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:09,YYAGYREKYRQTDVNKLYLWYDSYTWAERAYTWY\nHLA-C*12:10,YYAGYREKYRQADVSNLYLRFDSYTWAEWAYTWY\nHLA-C*12:11,YYAGYREKYRQADVSNLYLWSDSYTWAEWAYTWY\nHLA-C*12:12,YYAGYREKYRQADESNLYLWYDSYTWAEWAYTWY\nHLA-C*12:13,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:14,YYAGYREKYRQADVSNLYLRYDSYTLAALAYTWY\nHL
A-C*12:15,YYAGYREKYRQADVSNLYLWYDLYTWAEWAYTWY\nHLA-C*12:16,YDSGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:17,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:18,YYAGYREKYRQADVSNLYLRYDSYTWAELAYTWY\nHLA-C*12:19,YYAGYREKYRQADVSNLYLWYDSYTWAECAYTWY\nHLA-C*12:20,YYAGYREKYRQADVSNLYLWYDSYTWAELAYTWY\nHLA-C*12:21,YYAGYREKYRQTDVNKLYLRYDSYTWAEWAYTWY\nHLA-C*12:22,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:23,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:24,YYAGYREKYRQADVSNLYLWYDSYTWAERAYTWY\nHLA-C*12:25,YYAGYPEKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:26,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:27,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:28,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:29,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:30,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:31,YYAGYREKYRQADVSNLYLWYNFYTWAEWAYTWY\nHLA-C*12:32,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:33,YYAGYREKYRQTDVNKLYLWYDSYTWAEWAYTWY\nHLA-C*12:34,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:35,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:36,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:37,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:38,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:40,YYAGYREKYRQADVSNLYLRYDSYTWAEWAYTWY\nHLA-C*12:41,YYAGYREKYRQADVNKLYLRYDSYTWAEWAYTWY\nHLA-C*12:43,YYAGYREKYRQADVSNLYLWYDSYTWAEWAYTWY\nHLA-C*12:44,YYAGYREKYRQADVSNLYIRYDSYTWAEWAYTWY\nHLA-C*14:02,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:03,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:04,YSAGYREKYRQADVNNLYLWFDSYTWAERAYTWY\nHLA-C*14:05,YSAGYREKYRQTDVSNLYLWYDSYTWAERAYTWY\nHLA-C*14:06,YSAGYREKYRQTDVSNLYLWFDSYTWAELAYTWY\nHLA-C*14:08,YSAGYREKYRQTDVSNLYPWFDSYTWAERAYTWY\nHLA-C*14:09,YSAGYREKYRQTDVSNLYLRYDSYTWAERAYTWY\nHLA-C*14:10,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:11,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:12,YSAGYREKYRQTDVNKLYLWFDSYTWAERAYTWY\nHLA-C*14:13,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:14,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:15,YSAGYREKYRQTDVSNLYLWFDSYTWAALAYTWY\nHLA-C*14:16,YSAGYREKYRQTDVSNLYLWFDSYTWAEWAYTWY\nHLA-C*14:17,YSAGYREKYRQTDVSNLYLWFDSYTLAARAYTWY\nHLA-C*14:18,YSSGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:19,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYTWY\nHLA-C*14:20,YSAGYREKYRQTDVSNLYLWFDSYTWAERAYLWY\nHLA-C*15:02,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:03,YYAGYRENYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:04,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:05,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:06,YYAGYRENYRQTDVNKLYIRYDYYTWAELAYTWY\nHLA-C*15:07,YYAGYRENYRQTDVSNLYIRYDLYTWAELAYTWY\nHLA-C*15:08,YYAGYRENYRQTDVNKLYIRYDLYTWAERAYTWY\nHLA-C*15:09,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:10,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:11,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:12,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:13,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:15,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYLWY\nHLA-C*15:16,YYAGYREKYRQADVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:17,YYAGYREKYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:18,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:19,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:20,YYAGYREKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:21,YYAGYRENYRQTDVSKLYIRYDLYTWAELAYTWY\nHLA-C*15:22,YYAGYRENYRQTDVNKLYLRYDFYTWAELAYTWY\nHLA-C*15:23,YDAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:24,YYAGYRENYRQTDVNKLYIRYNYYTWAELAYTWY\nHLA-C*15:25,YYAGYREKYRQADVSNLYIRYNFYTWAEDAYTSY\nHLA-C*15:26,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:27,YYAGYRNKYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C*15:28,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:29,YYAGYRENYRQTDVNKLYIRYDFYTWAELAYTWY\nHLA-C*15:30,YYAGYRENYRQTDVNKLYIRYDSYTWAELAYTWY\nHLA-C
*15:31,YYAGYRENYRQTDVNKLYIRYDLYTWAALAYTWY\nHLA-C*15:33,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:34,YYAGYRENYRQTDVNKLYIRYDLYTWAELAYTWY\nHLA-C*15:35,YYAGYRENYRQTDVNKLHIRYDLYTWAELAYTWY\nHLA-C*16:01,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:02,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:04,YYAGYREKYRQTDVSNLYLWYDSYTWAAWAYTWY\nHLA-C*16:06,YYAGYREKYRQTDVSNLYLRSDSYTWAAQAYTWY\nHLA-C*16:07,YYAGYREKYRQTDVSNLYLRYDSYTWAAQAYTWY\nHLA-C*16:08,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:09,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:10,YYAGYREKYRQTDVSNLYLWYDDYTWAAQAYTWY\nHLA-C*16:11,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:12,YYAGYGEKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:13,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:14,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:15,YYAGYREKYRQADVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:17,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:18,YYAGYREKYRQTDVSNLYLWCDSYTWAAQAYTWY\nHLA-C*16:19,YYAGYREKYRQTDVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:20,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:21,YDAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:22,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:23,YYAGYREKYRQTDVSNLYLWFDSYTWAAQAYTWY\nHLA-C*16:24,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*16:25,YYAGYREKYRQADVNKLYLWYDSYTWAAQAYTWY\nHLA-C*16:26,YYAGYREKYRQTDVSNLYLWYDSYTWAAQAYTWY\nHLA-C*17:01,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:02,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:03,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:04,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:05,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:06,YYAGYREKYRQADVNKLYIRYNFYSLAELAYEWY\nHLA-C*17:07,YYAGYREKYRQADVNKLYIRYNFYSLAELAYLWY\nHLA-C*18:01,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*18:02,YDSGYREKYRQADVNKLYLRFNFYTWAERAYTWY\nHLA-C*18:03,YDSGYREKYRQADVNKLYLRFNFYTWAEWAYEWY\n"
  },
  {
    "path": "downloads-generation/models_class1_unselected_with_mass_spec/generate_hyperparameters.py",
    "content": "\"\"\"\nGenerate grid of hyperparameters\n\"\"\"\n\nfrom sys import stdout\nfrom copy import deepcopy\nfrom yaml import dump\n\nbase_hyperparameters = {\n    ##########################################\n    # ENSEMBLE SIZE\n    ##########################################\n    \"n_models\": 4,\n\n    ##########################################\n    # OPTIMIZATION\n    ##########################################\n    \"max_epochs\": 500,\n    \"patience\": 20,\n    \"early_stopping\": True,\n    \"validation_split\": 0.1,\n    \"minibatch_size\": None,\n    \"loss\": \"custom:mse_with_inequalities\",\n\n    ##########################################\n    # RANDOM NEGATIVE PEPTIDES\n    ##########################################\n    \"random_negative_rate\": 0.1,\n    \"random_negative_constant\": 25,\n    \"random_negative_affinity_min\": 20000.0,\n    \"random_negative_affinity_max\": 50000.0,\n\n    ##########################################\n    # PEPTIDE REPRESENTATION\n    ##########################################\n    # One of \"one-hot\", \"embedding\", or \"BLOSUM62\".\n    \"peptide_amino_acid_encoding\": \"BLOSUM62\",\n    \"use_embedding\": False,  # maintained for backward compatability\n    \"embedding_output_dim\": 8,  # only used if using embedding\n    \"kmer_size\": 15,\n\n    ##########################################\n    # NEURAL NETWORK ARCHITECTURE\n    ##########################################\n    \"locally_connected_layers\": [\n        {\n            \"filters\": 8,\n            \"activation\": \"tanh\",\n            \"kernel_size\": 3\n        }\n    ],\n    \"activation\": \"tanh\",\n    \"output_activation\": \"sigmoid\",\n    \"layer_sizes\": [16],\n    \"dense_layer_l1_regularization\": None,\n    \"batch_normalization\": False,\n    \"dropout_probability\": 0.0,\n\n    ##########################################\n    # TRAINING Data\n    ##########################################\n    \"train_data\": {\"subset\": \"all\", \"pretrain_min_points\": 1000},\n}\n\ngrid = []\nfor train_subset in [\"all\", \"quantitative\"]:\n    for minibatch_size in [128]:\n        for dense_layer_size in [8, 16, 32, 64]:\n            for l1 in [0.0, 0.001]:\n                for num_lc in [0, 1, 2]:\n                    for lc_kernel_size in [3, 5]:\n                        new = deepcopy(base_hyperparameters)\n                        new[\"minibatch_size\"] = minibatch_size\n                        new[\"train_data\"][\"subset\"] = train_subset\n                        new[\"layer_sizes\"] = [dense_layer_size]\n                        new[\"dense_layer_l1_regularization\"] = l1\n                        (lc_layer,) = new[\"locally_connected_layers\"]\n                        lc_layer['kernel_size'] = lc_kernel_size\n                        if num_lc == 0:\n                            new[\"locally_connected_layers\"] = []\n                        elif num_lc == 1:\n                            new[\"locally_connected_layers\"] = [lc_layer]\n                        elif num_lc == 2:\n                            new[\"locally_connected_layers\"] = [lc_layer, deepcopy(lc_layer)]\n                        if not grid or new not in grid:\n                            grid.append(new)\n\ndump(grid, stdout)\n"
  },
  {
    "path": "downloads-generation/random_peptide_predictions/GENERATE.sh",
    "content": "#!/bin/bash\n#\n# Generate predictions for random peptides. Used for pre-training some models.\n#\nset -e\nset -x\n\nDOWNLOAD_NAME=random_peptide_predictions\nSCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation\nSCRIPT_ABSOLUTE_PATH=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)/$(basename \"${BASH_SOURCE[0]}\")\"\nSCRIPT_DIR=$(dirname \"$SCRIPT_ABSOLUTE_PATH\")\nexport PYTHONUNBUFFERED=1\n\nmkdir -p \"$SCRATCH_DIR\"\nrm -rf \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\nmkdir \"$SCRATCH_DIR/$DOWNLOAD_NAME\"\n\n# Send stdout and stderr to a logfile included with the archive.\nexec >  >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\")\nexec 2> >(tee -ia \"$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt\" >&2)\n\n# Log some environment info\ndate\npip freeze\ngit status\n\ncd $SCRATCH_DIR/$DOWNLOAD_NAME\ncp $SCRIPT_DIR/random_predictions.py .\ncp $SCRIPT_ABSOLUTE_PATH .\n\ntime python random_predictions.py \\\n    --num-peptides 5000000 \\\n    --models \"$(mhcflurry-downloads path models_class1_selected_no_mass_spec)/models\" \\\n    --out predictions.csv\n\nbzip2 predictions.csv\nbzip2 LOG.txt\ntar -cjf \"../${DOWNLOAD_NAME}.tar.bz2\" *\n\necho \"Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2\"\n"
  },
  {
    "path": "downloads-generation/random_peptide_predictions/random_predictions.py",
    "content": "\"\"\"\nGenerate predictions for random peptides.\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\nimport argparse\nimport time\nimport math\n\nimport pandas\n\nimport mhcflurry\nfrom mhcflurry.common import random_peptides\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\"--models\", required=True)\nparser.add_argument(\"--num-peptides\", type=int)\nparser.add_argument(\"--out\", required=True)\nparser.add_argument(\"--chunksize\", type=int, default=10000)\n\n\ndef run():\n    args = parser.parse_args(sys.argv[1:])\n    print(args)\n\n    predictor = mhcflurry.Class1AffinityPredictor.load(args.models)\n\n    alleles = pandas.Series(predictor.supported_alleles)\n\n    # Clear the file\n    pandas.DataFrame(columns=alleles).to_csv(args.out, index=True)\n\n    (min_length, max_length) = predictor.supported_peptide_lengths\n\n    peptides_per_length = int(\n        math.ceil(args.chunksize / (max_length - min_length)))\n\n    peptides_written = 0\n    i = 0\n    while peptides_written < args.num_peptides:\n        print(\"Chunk %d / %d\" % (\n            i + 1, math.ceil(args.num_peptides / args.chunksize)))\n        start = time.time()\n        peptides = []\n        for l in range(8, 16):\n            peptides.extend(random_peptides(peptides_per_length, length=l))\n\n        peptides = pandas.Series(peptides).sample(\n            n=min(args.chunksize, args.num_peptides - peptides_written)).values\n        encodable_peptides = mhcflurry.encodable_sequences.EncodableSequences.create(\n            peptides)\n        df = pandas.DataFrame(index=peptides)\n        for allele in alleles:\n            df[allele] = predictor.predict(encodable_peptides, allele=allele)\n        df.to_csv(\n            args.out, index=True, mode='a', header=False, float_format='%.1f')\n        print(\"Wrote: %s  [%0.2f sec]\" % (args.out, time.time() - start))\n        i += 1\n        peptides_written += len(peptides)\n\n    print(\"Done.\")\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "lint.sh",
    "content": "#!/bin/bash\nset -o errexit\n\n# Lint using ruff (fast Python linter)\n# Run from project root directory\n\necho \"Running ruff linter...\"\nruff check mhcflurry/ test/ --output-format=concise\n\necho \"Passes ruff check\"\n"
  },
  {
    "path": "mhcflurry/__init__.py",
    "content": "\"\"\"\nClass I MHC ligand prediction package\n\"\"\"\n\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .class1_neural_network import Class1NeuralNetwork\nfrom .class1_processing_predictor import Class1ProcessingPredictor\nfrom .class1_processing_neural_network import Class1ProcessingNeuralNetwork\nfrom .class1_presentation_predictor import Class1PresentationPredictor\n\nfrom .version import __version__\n\n__all__ = [\n    \"__version__\",\n    \"Class1AffinityPredictor\",\n    \"Class1NeuralNetwork\",\n    \"Class1ProcessingPredictor\",\n    \"Class1ProcessingNeuralNetwork\",\n    \"Class1PresentationPredictor\",\n]\n"
  },
  {
    "path": "mhcflurry/allele_encoding.py",
    "content": "import pandas\n\nfrom . import amino_acid\n\n\nclass AlleleEncoding(object):\n    def __init__(self, alleles=None, allele_to_sequence=None, borrow_from=None):\n        \"\"\"\n        A place to cache encodings for a sequence of alleles.\n\n        We frequently work with alleles by integer indices, for example as\n        inputs to neural networks. This class is used to map allele names to\n        integer indices in a consistent way by keeping track of the universe\n        of alleles under use, i.e. a distinction is made between the universe\n        of supported alleles (what's in `allele_to_sequence`) and the actual\n        set of alleles used for some task (what's in `alleles`).\n\n        Parameters\n        ----------\n        alleles : list of string\n            Allele names. If any allele is None instead of string, it will be\n            mapped to the special index value -1.\n\n        allele_to_sequence : dict of str -> str\n            Allele name to amino acid sequence\n\n        borrow_from : AlleleEncoding, optional\n            If specified, do not specify allele_to_sequence. The sequences from\n            the provided instance are used. This guarantees that the mappings\n            from allele to index and from allele to sequence are the same\n            between the instances.\n        \"\"\"\n\n        if alleles is not None:\n            alleles = pandas.Series(alleles)\n        self.borrow_from = borrow_from\n        self.allele_to_sequence = allele_to_sequence\n\n        if self.borrow_from is None:\n            assert allele_to_sequence is not None\n            all_alleles = (\n                sorted(allele_to_sequence))\n            self.allele_to_index = dict(\n                (allele, i)\n                for (i, allele) in enumerate([None] + all_alleles))\n            unpadded = pandas.Series([\n                    allele_to_sequence[a] if a is not None else \"\"\n                    for a in [None] + all_alleles\n                ],\n                index=[None] + all_alleles)\n            self.sequences = unpadded.str.pad(\n                unpadded.str.len().max(), fillchar=\"X\")\n        else:\n            assert allele_to_sequence is None\n            self.allele_to_index = borrow_from.allele_to_index\n            self.sequences = borrow_from.sequences\n            self.allele_to_sequence = borrow_from.allele_to_sequence\n\n        if alleles is not None:\n            assert all(\n                allele in self.allele_to_index for allele in alleles),\\\n                \"Missing alleles: \" + \" \".join(set(\n                    a for a in alleles if a not in self.allele_to_index))\n            self.indices = alleles.map(self.allele_to_index)\n            assert not self.indices.isnull().any()\n            self.alleles = alleles\n        else:\n            self.indices = None\n            self.alleles = None\n\n        self.encoding_cache = {}\n\n    def compact(self):\n        \"\"\"\n        Return a new AlleleEncoding in which the universe of supported alleles\n        is only the alleles actually used.\n\n        Returns\n        -------\n        AlleleEncoding\n        \"\"\"\n        return AlleleEncoding(\n            alleles=self.alleles,\n            allele_to_sequence=dict(\n                (allele, self.allele_to_sequence[allele])\n                for allele in self.alleles.unique()\n                if allele is not None))\n\n    def allele_representations(self, encoding_name):\n        \"\"\"\n        Encode the universe of 
supported allele sequences to a matrix.\n\n        Parameters\n        ----------\n        encoding_name : string\n            How to represent amino acids. Valid names are \"BLOSUM62\" or\n            \"one-hot\". See `amino_acid.ENCODING_DATA_FRAMES`.\n\n        Returns\n        -------\n        numpy.array of shape\n            (num alleles in universe, sequence length, vector size)\n        where vector size is usually 21 (20 amino acids + X character)\n        \"\"\"\n        if self.borrow_from is not None:\n            return self.borrow_from.allele_representations(encoding_name)\n\n        cache_key = (\n            \"allele_representations\",\n            encoding_name)\n        if cache_key not in self.encoding_cache:\n            index_encoded_matrix = amino_acid.index_encoding(\n                self.sequences.values,\n                amino_acid.AMINO_ACID_INDEX)\n            vector_encoded = amino_acid.fixed_vectors_encoding(\n                index_encoded_matrix,\n                amino_acid.ENCODING_DATA_FRAMES[encoding_name])\n            self.encoding_cache[cache_key] = vector_encoded\n        return self.encoding_cache[cache_key]\n\n    def fixed_length_vector_encoded_sequences(self, encoding_name):\n        \"\"\"\n        Encode allele sequences (not the universe of alleles) to a matrix.\n\n        Parameters\n        ----------\n        encoding_name : string\n            How to represent amino acids. Valid names are \"BLOSUM62\" or\n            \"one-hot\". See `amino_acid.ENCODING_DATA_FRAMES`.\n\n        Returns\n        -------\n        numpy.array with shape:\n            (num alleles, sequence length, vector size)\n        where vector size is usually 21 (20 amino acids + X character)\n        \"\"\"\n        cache_key = (\n            \"fixed_length_vector_encoding\",\n            encoding_name)\n        if cache_key not in self.encoding_cache:\n            vector_encoded = self.allele_representations(encoding_name)\n            result = vector_encoded[self.indices]\n            self.encoding_cache[cache_key] = result\n        return self.encoding_cache[cache_key]\n"
  },
  {
    "path": "mhcflurry/amino_acid.py",
    "content": "\"\"\"\nFunctions for encoding fixed length sequences of amino acids into various\nvector representations, such as one-hot and BLOSUM62.\n\"\"\"\n\nimport collections\nimport warnings\nfrom copy import copy\nfrom io import StringIO\n\nimport pandas\n\n\nCOMMON_AMINO_ACIDS = collections.OrderedDict(sorted({\n    \"A\": \"Alanine\",\n    \"R\": \"Arginine\",\n    \"N\": \"Asparagine\",\n    \"D\": \"Aspartic Acid\",\n    \"C\": \"Cysteine\",\n    \"E\": \"Glutamic Acid\",\n    \"Q\": \"Glutamine\",\n    \"G\": \"Glycine\",\n    \"H\": \"Histidine\",\n    \"I\": \"Isoleucine\",\n    \"L\": \"Leucine\",\n    \"K\": \"Lysine\",\n    \"M\": \"Methionine\",\n    \"F\": \"Phenylalanine\",\n    \"P\": \"Proline\",\n    \"S\": \"Serine\",\n    \"T\": \"Threonine\",\n    \"W\": \"Tryptophan\",\n    \"Y\": \"Tyrosine\",\n    \"V\": \"Valine\",\n}.items()))\nCOMMON_AMINO_ACIDS_WITH_UNKNOWN = copy(COMMON_AMINO_ACIDS)\nCOMMON_AMINO_ACIDS_WITH_UNKNOWN[\"X\"] = \"Unknown\"\n\nAMINO_ACID_INDEX = dict(\n    (letter, i) for (i, letter) in enumerate(COMMON_AMINO_ACIDS_WITH_UNKNOWN))\n\nfor (letter, i) in list(AMINO_ACID_INDEX.items()):\n    AMINO_ACID_INDEX[letter.lower()] = i  # Support lower-case as well.\n\nAMINO_ACIDS = list(COMMON_AMINO_ACIDS_WITH_UNKNOWN.keys())\n\nBLOSUM62_MATRIX = pandas.read_csv(StringIO(\"\"\"\n   A  R  N  D  C  Q  E  G  H  I  L  K  M  F  P  S  T  W  Y  V  X\nA  4 -1 -2 -2  0 -1 -1  0 -2 -1 -1 -1 -1 -2 -1  1  0 -3 -2  0  0\nR -1  5  0 -2 -3  1  0 -2  0 -3 -2  2 -1 -3 -2 -1 -1 -3 -2 -3  0\nN -2  0  6  1 -3  0  0  0  1 -3 -3  0 -2 -3 -2  1  0 -4 -2 -3  0\nD -2 -2  1  6 -3  0  2 -1 -1 -3 -4 -1 -3 -3 -1  0 -1 -4 -3 -3  0\nC  0 -3 -3 -3  9 -3 -4 -3 -3 -1 -1 -3 -1 -2 -3 -1 -1 -2 -2 -1  0\nQ -1  1  0  0 -3  5  2 -2  0 -3 -2  1  0 -3 -1  0 -1 -2 -1 -2  0\nE -1  0  0  2 -4  2  5 -2  0 -3 -3  1 -2 -3 -1  0 -1 -3 -2 -2  0\nG  0 -2  0 -1 -3 -2 -2  6 -2 -4 -4 -2 -3 -3 -2  0 -2 -2 -3 -3  0\nH -2  0  1 -1 -3  0  0 -2  8 -3 -3 -1 -2 -1 -2 -1 -2 -2  2 -3  0\nI -1 -3 -3 -3 -1 -3 -3 -4 -3  4  2 -3  1  0 -3 -2 -1 -3 -1  3  0\nL -1 -2 -3 -4 -1 -2 -3 -4 -3  2  4 -2  2  0 -3 -2 -1 -2 -1  1  0\nK -1  2  0 -1 -3  1  1 -2 -1 -3 -2  5 -1 -3 -1  0 -1 -3 -2 -2  0\nM -1 -1 -2 -3 -1  0 -2 -3 -2  1  2 -1  5  0 -2 -1 -1 -1 -1  1  0\nF -2 -3 -3 -3 -2 -3 -3 -3 -1  0  0 -3  0  6 -4 -2 -2  1  3 -1  0\nP -1 -2 -2 -1 -3 -1 -1 -2 -2 -3 -3 -1 -2 -4  7 -1 -1 -4 -3 -2  0\nS  1 -1  1  0 -1  0  0  0 -1 -2 -2  0 -1 -2 -1  4  1 -3 -2 -2  0\nT  0 -1  0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -1  1  5 -2 -2  0  0\nW -3 -3 -4 -4 -2 -2 -3 -2 -2 -3 -2 -3 -1  1 -4 -3 -2 11  2 -3  0\nY -2 -2 -2 -3 -2 -1 -2 -3  2 -1 -1 -2 -1  3 -3 -2 -2  2  7 -1  0\nV  0 -3 -3 -3 -1 -2 -2 -3 -3  3  1 -2  1 -1 -2 -2  0 -3 -1  4  0\nX  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  1\n\"\"\"), sep=r'\\s+').loc[AMINO_ACIDS, AMINO_ACIDS].astype(\"int8\")\nassert (BLOSUM62_MATRIX == BLOSUM62_MATRIX.T).all().all()\n\nENCODING_DATA_FRAMES = {\n    \"BLOSUM62\": BLOSUM62_MATRIX,\n    \"one-hot\": pandas.DataFrame([\n        [1 if i == j else 0 for i in range(len(AMINO_ACIDS))]\n        for j in range(len(AMINO_ACIDS))\n    ], index=AMINO_ACIDS, columns=AMINO_ACIDS)\n}\n\n\ndef available_vector_encodings():\n    \"\"\"\n    Return list of supported amino acid vector encodings.\n\n    Returns\n    -------\n    list of string\n\n    \"\"\"\n    return list(ENCODING_DATA_FRAMES)\n\n\ndef vector_encoding_length(name):\n    \"\"\"\n    Return the length of the given vector encoding.\n\n    Parameters\n    ----------\n    name : string\n\n    
Returns\n    -------\n    int\n    \"\"\"\n    return ENCODING_DATA_FRAMES[name].shape[1]\n\n\ndef index_encoding(sequences, letter_to_index_dict):\n    \"\"\"\n    Encode a sequence of same-length strings to a matrix of integers of the\n    same shape. The map from characters to integers is given by\n    `letter_to_index_dict`.\n\n    Given a sequence of `n` strings all of length `k`, return an `n` x `k` array\n    where the (`i`, `j`)th element is `letter_to_index_dict[sequences[i][j]]`.\n\n    Parameters\n    ----------\n    sequences : list of length n of strings of length k\n    letter_to_index_dict : dict : string -> int\n\n    Returns\n    -------\n    numpy.array of integers with shape (`n`, `k`)\n    \"\"\"\n    df = pandas.DataFrame(iter(s) for s in sequences)\n    with warnings.catch_warnings():\n        warnings.filterwarnings(\"ignore\", category=FutureWarning,\n                                message=\".*Downcasting.*\")\n        warnings.filterwarnings(\"ignore\", category=DeprecationWarning,\n                                message=\".*no_silent_downcasting.*\")\n        result = df.replace(letter_to_index_dict).infer_objects()\n    return result.values\n\n\ndef fixed_vectors_encoding(index_encoded_sequences, letter_to_vector_df):\n    \"\"\"\n    Given an `n` x `k` matrix of integers such as that returned by `index_encoding()` and\n    a dataframe mapping each index to an arbitrary vector, return an `n` x `k` x `m`\n    array where the (`i`, `j`)'th vector is `letter_to_vector_df.iloc[index_encoded_sequences[i][j]]`.\n\n    The dataframe index and column names are ignored here; the indexing is done\n    entirely by integer position in the dataframe.\n\n    Parameters\n    ----------\n    index_encoded_sequences : `n` x `k` array of integers\n\n    letter_to_vector_df : pandas.DataFrame of shape (`alphabet size`, `m`)\n\n    Returns\n    -------\n    numpy.array of integers with shape (`n`, `k`, `m`)\n    \"\"\"\n    (num_sequences, sequence_length) = index_encoded_sequences.shape\n    # Use shape[1] (the vector length m) for the trailing dimension. For the\n    # built-in encodings the dataframe is square (21 x 21), so this agrees\n    # with the previous use of shape[0].\n    target_shape = (\n        num_sequences, sequence_length, letter_to_vector_df.shape[1])\n    result = letter_to_vector_df.iloc[\n        index_encoded_sequences.reshape((-1,))  # reshape() avoids copy\n    ].values.reshape(target_shape)\n    return result\n
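\n# Example (illustrative):\n#\n#     m = index_encoding([\"AR\", \"RA\"], AMINO_ACID_INDEX)\n#     # m.shape == (2, 2)\n#     v = fixed_vectors_encoding(m, ENCODING_DATA_FRAMES[\"BLOSUM62\"])\n#     # v.shape == (2, 2, 21)\n"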
  },
  {
    "path": "mhcflurry/calibrate_percentile_ranks_command.py",
    "content": "\"\"\"\nCalibrate percentile ranks for models. Runs in-place.\n\"\"\"\nimport argparse\nimport os\nimport signal\nimport sys\nimport time\nimport traceback\nimport collections\nfrom functools import partial\n\nimport pandas\nimport numpy\n\n\nimport tqdm  # progress bar\n\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .class1_presentation_predictor import Class1PresentationPredictor\nfrom .common import normalize_allele_name\nfrom .encodable_sequences import EncodableSequences\nfrom .common import configure_logging, random_peptides, amino_acid_distribution\nfrom .local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom .cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\nparser = argparse.ArgumentParser(usage=__doc__)\nparser.add_argument(\n    \"--predictor-kind\",\n    choices=(\"class1_affinity\", \"class1_presentation\"),\n    default=\"class1_affinity\",\n    help=\"Type of predictor to calibrate\")\nparser.add_argument(\n    \"--models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to read and write models\")\nparser.add_argument(\n    \"--allele\",\n    default=None,\n    nargs=\"+\",\n    help=\"Alleles to calibrate percentile ranks for. If not specified all \"\n    \"alleles are used\")\nparser.add_argument(\n    \"--match-amino-acid-distribution-data\",\n    help=\"Sample random peptides from the amino acid distribution of the \"\n    \"peptides listed in the supplied CSV file, which must have a 'peptide' \"\n    \"column. If not specified a uniform distribution is used.\")\nparser.add_argument(\n    \"--alleles-file\",\n    default=None,\n    help=\"Use alleles in supplied CSV file, which must have an 'allele' column.\")\nparser.add_argument(\n    \"--num-peptides-per-length\",\n    type=int,\n    metavar=\"N\",\n    default=int(1e5),\n    help=\"Number of peptides per length to use to calibrate percent ranks. \"\n    \"Default: %(default)s.\")\nparser.add_argument(\n    \"--num-genotypes\",\n    type=int,\n    metavar=\"N\",\n    default=25,\n    help=\"Used when calibrrating a presentation predictor. Number of genotypes\"\n    \"to sample\")\nparser.add_argument(\n    \"--alleles-per-genotype\",\n    type=int,\n    metavar=\"N\",\n    default=6,\n    help=\"Used when calibrating a presentation predictor. Number of alleles \"\n    \"per genotype. Use 1 to calibrate for single alleles. Default: %(default)s\")\nparser.add_argument(\n    \"--motif-summary\",\n    default=False,\n    action=\"store_true\",\n    help=\"Calculate motifs and length preferences for each allele\")\nparser.add_argument(\n    \"--summary-top-peptide-fraction\",\n    default=[0.0001, 0.001, 0.01, 0.1, 1.0],\n    nargs=\"+\",\n    type=float,\n    metavar=\"X\",\n    help=\"The top X fraction of predictions (i.e. tightest binders) to use to \"\n    \"generate motifs and length preferences. 
Default: %(default)s\")\nparser.add_argument(\n    \"--length-range\",\n    default=(8, 15),\n    type=int,\n    nargs=2,\n    help=\"Min and max peptide length to calibrate, inclusive. \"\n    \"Default: %(default)s\")\nparser.add_argument(\n    \"--prediction-batch-size\",\n    type=int,\n    default=4096,\n    help=\"Batch size for predictions\")\nparser.add_argument(\n    \"--alleles-per-work-chunk\",\n    type=int,\n    metavar=\"N\",\n    default=1,\n    help=\"Number of alleles per work chunk. Default: %(default)s.\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Verbosity. Default: %(default)s\",\n    default=0)\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\ndef run(argv=sys.argv[1:]):\n    global GLOBAL_DATA\n\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    args.models_dir = os.path.abspath(args.models_dir)\n\n    configure_logging(verbose=args.verbosity > 1)\n\n    aa_distribution = None\n    if args.match_amino_acid_distribution_data:\n        distribution_peptides = pandas.read_csv(\n            args.match_amino_acid_distribution_data).peptide.unique()\n        distribution_peptides = [\n            x for x in distribution_peptides if 'X' not in x and 'B' not in x and 'U' not in x\n        ]\n        aa_distribution = amino_acid_distribution(distribution_peptides)\n        print(\"Using amino acid distribution:\")\n        print(aa_distribution)\n\n    start = time.time()\n    peptides = []\n    lengths = range(args.length_range[0], args.length_range[1] + 1)\n    for length in lengths:\n        peptides.extend(\n            random_peptides(\n                args.num_peptides_per_length,\n                length,\n                distribution=aa_distribution))\n    print(\"Done generating peptides in %0.2f sec.\" % (time.time() - start))\n\n    if args.predictor_kind == \"class1_affinity\":\n        return run_class1_affinity_predictor(args, peptides)\n    elif args.predictor_kind == \"class1_presentation\":\n        return run_class1_presentation_predictor(args, peptides)\n    else:\n        raise ValueError(\"Unsupported kind %s\" % args.predictor_kind)\n\n\ndef run_class1_presentation_predictor(args, peptides):\n    # This will trigger a Keras import - will break local parallelism.\n    predictor = Class1PresentationPredictor.load(args.models_dir)\n\n    if args.allele:\n        alleles = [normalize_allele_name(a) for a in args.allele]\n    elif args.alleles_file:\n        alleles = pandas.read_csv(args.alleles_file).allele.unique()\n    else:\n        alleles = predictor.supported_alleles\n\n    print(\"Num alleles\", len(alleles))\n\n    genotypes = {}\n    if args.alleles_per_genotype == 6:\n        gene_to_alleles = collections.defaultdict(list)\n        for a in alleles:\n            for gene in [\"A\", \"B\", \"C\"]:\n                if a.startswith(\"HLA-%s\" % gene):\n                    gene_to_alleles[gene].append(a)\n\n        for _ in range(args.num_genotypes):\n            genotype = []\n            for gene in [\"A\", \"A\", \"B\", \"B\", \"C\", \"C\"]:\n                genotype.append(numpy.random.choice(gene_to_alleles[gene]))\n            genotypes[\",\".join(genotype)] = genotype\n    elif args.alleles_per_genotype == 1:\n        for _ in range(args.num_genotypes):\n            genotype = 
[numpy.random.choice(alleles)]\n            genotypes[\",\".join(genotype)] = genotype\n    else:\n        raise ValueError(\"Alleles per genotype must be 6 or 1\")\n\n    print(\"Sampled genotypes: \", list(genotypes))\n    print(\"Num peptides: \", len(peptides))\n\n    start = time.time()\n    print(\"Generating predictions\")\n    predictions_df = predictor.predict(\n        peptides=peptides,\n        alleles=genotypes)\n    print(\"Finished in %0.2f sec.\" % (time.time() - start))\n    print(predictions_df)\n\n    print(\"Calibrating ranks\")\n    scores = predictions_df.presentation_score.values\n    predictor.calibrate_percentile_ranks(scores)\n    print(\"Done. Saving.\")\n\n    predictor.save(\n        args.models_dir,\n        write_affinity_predictor=False,\n        write_processing_predictor=False,\n        write_weights=False,\n        write_percent_ranks=True,\n        write_info=False,\n        write_metdata=False)\n    print(\"Wrote predictor to: %s\" % args.models_dir)\n\n\ndef run_class1_affinity_predictor(args, peptides):\n    global GLOBAL_DATA\n\n    # Load with optimization_level=0 so we can optimize per-worker later.\n    predictor = Class1AffinityPredictor.load(\n        args.models_dir,\n        optimization_level=0,\n    )\n\n    if args.allele:\n        alleles = [normalize_allele_name(a) for a in args.allele]\n    elif args.alleles_file:\n        alleles = pandas.read_csv(args.alleles_file).allele.unique()\n    else:\n        alleles = predictor.supported_alleles\n\n    allele_set = set(alleles)\n\n    if predictor.allele_to_sequence:\n        # Remove alleles that have the same sequence.\n        new_allele_set = set()\n        sequence_to_allele = collections.defaultdict(set)\n        for allele in list(allele_set):\n            sequence_to_allele[predictor.allele_to_sequence[allele]].add(allele)\n        for equivalent_alleles in sequence_to_allele.values():\n            equivalent_alleles = sorted(equivalent_alleles)\n            keep = equivalent_alleles.pop(0)\n            new_allele_set.add(keep)\n        print(\n            \"Sequence comparison reduced num alleles from\",\n            len(allele_set),\n            \"to\",\n            len(new_allele_set))\n        allele_set = new_allele_set\n\n    alleles = sorted(allele_set)\n\n    print(\"Percent rank calibration for %d alleles. 
\" % (len(alleles)))\n\n    print(\"Encoding %d peptides.\" % len(peptides))\n    start = time.time()\n    encoded_peptides = EncodableSequences.create(peptides)\n    del peptides\n\n    # Now we encode the peptides for each neural network, so the encoding\n    # becomes cached.\n    for network in predictor.neural_networks:\n        network.peptides_to_network_input(encoded_peptides)\n    assert encoded_peptides.encoding_cache  # must have cached the encoding\n    print(\"Finished encoding peptides in %0.2f sec.\" % (time.time() - start))\n\n    # Store peptides in global variable so they are in shared memory\n    # after fork, instead of needing to be pickled (when doing a parallel run).\n    GLOBAL_DATA[\"calibration_peptides\"] = encoded_peptides\n    GLOBAL_DATA[\"predictor\"] = predictor\n    GLOBAL_DATA[\"args\"] = {\n        'motif_summary': args.motif_summary,\n        'summary_top_peptide_fractions': args.summary_top_peptide_fraction,\n        'verbose': args.verbosity > 0,\n        'model_kwargs': {\n            'batch_size': args.prediction_batch_size,\n        }\n    }\n    del encoded_peptides\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n    worker_pool = None\n    start = time.time()\n\n    work_items = []\n    for allele in alleles:\n        if not work_items or len(\n                work_items[-1]['alleles']) >= args.alleles_per_work_chunk:\n            work_items.append({\"alleles\": []})\n        work_items[-1]['alleles'].append(allele)\n\n    if serial_run:\n        # Serial run\n        print(\"Running in serial.\")\n        results = (\n            do_class1_affinity_calibrate_percentile_ranks(**item) for item in work_items)\n    elif args.cluster_parallelism:\n        # Run using separate processes HPC cluster.\n        print(\"Running on cluster.\")\n        results = cluster_results_from_args(\n            args,\n            work_function=do_class1_affinity_calibrate_percentile_ranks,\n            work_items=work_items,\n            constant_data=GLOBAL_DATA,\n            result_serialization_method=\"pickle\",\n            clear_constant_data=True)\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n\n        for item in work_items:\n            item['constant_data'] = GLOBAL_DATA\n\n        results = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, do_class1_affinity_calibrate_percentile_ranks),\n            work_items,\n            chunksize=1)\n\n    summary_results_lists = collections.defaultdict(list)\n    for work_item in tqdm.tqdm(results, total=len(work_items)):\n        for (transforms, summary_results) in work_item:\n            predictor.allele_to_percent_rank_transform.update(transforms)\n            if summary_results is not None:\n                for (item, value) in summary_results.items():\n                    summary_results_lists[item].append(value)\n    print(\"Done calibrating %d alleles.\" % len(alleles))\n    if summary_results_lists:\n        for (name, lst) in summary_results_lists.items():\n            df = pandas.concat(lst, ignore_index=True)\n            predictor.metadata_dataframes[name] = df\n            print(\"Including summary result: %s\" % name)\n            print(df)\n\n    predictor.save(args.models_dir, model_names_to_write=[])\n\n    percent_rank_calibration_time = time.time() - start\n\n    if worker_pool:\n        worker_pool.close()\n        
worker_pool.join()\n\n    print(\"Percent rank calibration time: %0.2f min.\" % (\n        percent_rank_calibration_time / 60.0))\n    print(\"Predictor written to: %s\" % args.models_dir)\n\n\ndef do_class1_affinity_calibrate_percentile_ranks(\n        alleles, constant_data=GLOBAL_DATA):\n\n    if 'predictor' not in constant_data:\n        raise ValueError(\"No predictor provided: \" + str(constant_data))\n\n    result_list = []\n    for (i, allele) in enumerate(alleles):\n        print(\"Processing allele\", i + 1, \"of\", len(alleles))\n        result_item = class1_affinity_calibrate_percentile_ranks(\n            allele,\n            constant_data['predictor'],\n            peptides=constant_data['calibration_peptides'],\n            **constant_data[\"args\"])\n        result_list.append(result_item)\n    return result_list\n\n\ndef class1_affinity_calibrate_percentile_ranks(\n        allele,\n        predictor,\n        peptides=None,\n        motif_summary=False,\n        summary_top_peptide_fractions=[0.001],\n        verbose=False,\n        model_kwargs={}):\n    if verbose:\n        print(\"Calibrating\", allele)\n    predictor.optimize()  # since we loaded with optimization_level=0\n    start = time.time()\n    summary_results = predictor.calibrate_percentile_ranks(\n        peptides=peptides,\n        alleles=[allele],\n        motif_summary=motif_summary,\n        summary_top_peptide_fractions=summary_top_peptide_fractions,\n        verbose=verbose,\n        model_kwargs=model_kwargs)\n    if verbose:\n        print(\"Done calibrating\", allele, \"in\", time.time() - start, \"sec\")\n    transforms = {\n        allele: predictor.allele_to_percent_rank_transform[allele],\n    }\n    return (transforms, summary_results)\n\n\nif __name__ == '__main__':\n    run()\n"
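\n# Example invocation (illustrative; the models directory path is an\n# assumption):\n#\n#     python -m mhcflurry.calibrate_percentile_ranks_command \\\n#         --models-dir /path/to/models \\\n#         --num-peptides-per-length 100000\n"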
  },
  {
    "path": "mhcflurry/class1_affinity_predictor.py",
    "content": "import collections\nimport hashlib\nimport json\nimport logging\nimport time\nimport warnings\nfrom os.path import join, exists, abspath\nfrom os import mkdir, environ\nfrom socket import gethostname\nfrom getpass import getuser\nfrom functools import partial\nimport numpy\nimport pandas\n\n\nfrom .class1_neural_network import Class1NeuralNetwork\nfrom .common import (\n    random_peptides,\n    positional_frequency_matrix,\n    normalize_allele_name\n)\nfrom .downloads import get_default_class1_models_dir\nfrom .encodable_sequences import EncodableSequences\nfrom .percent_rank_transform import PercentRankTransform\nfrom .regression_target import to_ic50\nfrom .version import __version__\nfrom .ensemble_centrality import CENTRALITY_MEASURES\nfrom .allele_encoding import AlleleEncoding\nfrom .common import save_weights, load_weights\n\n\n# Default function for combining predictions across models in an ensemble.\n# See ensemble_centrality.py for other options.\nDEFAULT_CENTRALITY_MEASURE = \"mean\"\n\n# Any value > 0 will result in attempting to optimize models after loading.\nOPTIMIZATION_LEVEL = int(environ.get(\"MHCFLURRY_OPTIMIZATION_LEVEL\", 1))\n\n\nclass Class1AffinityPredictor(object):\n    \"\"\"\n    High-level interface for peptide/MHC I binding affinity prediction.\n\n    This class manages low-level `Class1NeuralNetwork` instances, each of which\n    wraps a single PyTorch network. The purpose of `Class1AffinityPredictor` is to\n    implement ensembles, handling of multiple alleles, and predictor loading and\n    saving. It also provides a place to keep track of metadata like prediction\n    histograms for percentile rank calibration.\n    \"\"\"\n    def __init__(\n            self,\n            allele_to_allele_specific_models=None,\n            class1_pan_allele_models=None,\n            allele_to_sequence=None,\n            manifest_df=None,\n            allele_to_percent_rank_transform=None,\n            metadata_dataframes=None,\n            provenance_string=None,\n            optimization_info=None):\n        \"\"\"\n        Parameters\n        ----------\n        allele_to_allele_specific_models : dict of string -> list of `Class1NeuralNetwork`\n            Ensemble of single-allele models to use for each allele.\n\n        class1_pan_allele_models : list of `Class1NeuralNetwork`\n            Ensemble of pan-allele models.\n\n        allele_to_sequence : dict of string -> string\n            MHC allele name to fixed-length amino acid sequence (sometimes\n            referred to as the pseudosequence). Required only if\n            class1_pan_allele_models is specified.\n\n        manifest_df : `pandas.DataFrame`, optional\n            Must have columns: model_name, allele, config_json, model.\n            Only required if you want to update an existing serialization of a\n            Class1AffinityPredictor. Otherwise this dataframe will be generated\n            automatically based on the supplied models.\n\n        allele_to_percent_rank_transform : dict of string -> `PercentRankTransform`, optional\n            `PercentRankTransform` instances to use for each allele\n\n        metadata_dataframes : dict of string -> pandas.DataFrame, optional\n            Optional additional dataframes to write to the models dir when\n            save() is called. 
Useful for tracking provenance.\n\n        provenance_string : string, optional\n            Optional info string to use in __str__.\n\n        optimization_info : dict, optional\n            Dict describing any optimizations already performed on the model.\n            The only currently supported optimization is to merge ensembles\n            together into one PyTorch model.\n        \"\"\"\n\n        if allele_to_allele_specific_models is None:\n            allele_to_allele_specific_models = {}\n        if class1_pan_allele_models is None:\n            class1_pan_allele_models = []\n\n        self.allele_to_sequence = (\n            dict(allele_to_sequence)\n            if allele_to_sequence is not None else None)  # make a copy\n\n        self._master_allele_encoding = None\n        if class1_pan_allele_models:\n            assert self.allele_to_sequence\n\n        self.allele_to_allele_specific_models = allele_to_allele_specific_models\n        self.class1_pan_allele_models = class1_pan_allele_models\n        self._manifest_df = manifest_df\n\n        if not allele_to_percent_rank_transform:\n            allele_to_percent_rank_transform = {}\n        self.allele_to_percent_rank_transform = allele_to_percent_rank_transform\n        self.metadata_dataframes = (\n            dict(metadata_dataframes) if metadata_dataframes else {})\n        self._cache = {}\n        self.optimization_info = optimization_info if optimization_info else {}\n\n        assert isinstance(self.allele_to_allele_specific_models, dict)\n        assert isinstance(self.class1_pan_allele_models, list)\n\n        self.provenance_string = provenance_string\n        self.allele_to_canonical = {}  # populated by load()\n\n    @property\n    def manifest_df(self):\n        \"\"\"\n        A pandas.DataFrame describing the models included in this predictor.\n\n        Based on:\n        - self.class1_pan_allele_models\n        - self.allele_to_allele_specific_models\n\n        Returns\n        -------\n        pandas.DataFrame\n        \"\"\"\n        if self._manifest_df is None:\n            rows = []\n            for (i, model) in enumerate(self.class1_pan_allele_models):\n                rows.append((\n                    self.model_name(\"pan-class1\", i),\n                    \"pan-class1\",\n                    json.dumps(model.get_config()),\n                    model\n                ))\n            for (allele, models) in self.allele_to_allele_specific_models.items():\n                for (i, model) in enumerate(models):\n                    rows.append((\n                        self.model_name(allele, i),\n                        allele,\n                        json.dumps(model.get_config()),\n                        model\n                    ))\n            self._manifest_df = pandas.DataFrame(\n                rows,\n                columns=[\"model_name\", \"allele\", \"config_json\", \"model\"])\n        return self._manifest_df\n\n    def clear_cache(self):\n        \"\"\"\n        Clear values cached based on the neural networks in this predictor.\n\n        Users should call this after mutating any of the following:\n            - self.class1_pan_allele_models\n            - self.allele_to_allele_specific_models\n            - self.allele_to_sequence\n\n        Methods that mutate these instance variables will call this method on\n        their own if needed.\n        \"\"\"\n        self._cache.clear()\n        self.provenance_string = None\n\n    @property\n    def neural_networks(self):\n        
\"\"\"\n        List of the neural networks in the ensemble.\n\n        Returns\n        -------\n        list of `Class1NeuralNetwork`\n        \"\"\"\n        result = []\n        for models in self.allele_to_allele_specific_models.values():\n            result.extend(models)\n        result.extend(self.class1_pan_allele_models)\n        return result\n\n    @classmethod\n    def merge(cls, predictors):\n        \"\"\"\n        Merge the ensembles of two or more `Class1AffinityPredictor` instances.\n\n        Note: the resulting merged predictor will NOT have calibrated percentile\n        ranks. Call `calibrate_percentile_ranks` on it if these are needed.\n\n        Parameters\n        ----------\n        predictors : sequence of `Class1AffinityPredictor`\n\n        Returns\n        -------\n        `Class1AffinityPredictor` instance\n\n        \"\"\"\n        assert len(predictors) > 0\n        if len(predictors) == 1:\n            return predictors[0]\n\n        allele_to_allele_specific_models = collections.defaultdict(list)\n        class1_pan_allele_models = []\n        allele_to_sequence = predictors[0].allele_to_sequence\n\n        for predictor in predictors:\n            for (allele, networks) in (\n                    predictor.allele_to_allele_specific_models.items()):\n                allele_to_allele_specific_models[allele].extend(networks)\n            class1_pan_allele_models.extend(\n                predictor.class1_pan_allele_models)\n\n        return Class1AffinityPredictor(\n            allele_to_allele_specific_models=allele_to_allele_specific_models,\n            class1_pan_allele_models=class1_pan_allele_models,\n            allele_to_sequence=allele_to_sequence\n        )\n\n    def merge_in_place(self, others):\n        \"\"\"\n        Add the models present in other predictors into the current predictor.\n\n        Parameters\n        ----------\n        others : list of Class1AffinityPredictor\n            Other predictors to merge into the current predictor.\n\n        Returns\n        -------\n        list of string : names of newly added models\n        \"\"\"\n        new_model_names = []\n        original_manifest = self.manifest_df\n        new_manifest_rows = []\n        for predictor in others:\n            for model in predictor.class1_pan_allele_models:\n                model_name = self.model_name(\n                    \"pan-class1\",\n                    len(self.class1_pan_allele_models))\n                row = pandas.Series(collections.OrderedDict([\n                    (\"model_name\", model_name),\n                    (\"allele\", \"pan-class1\"),\n                    (\"config_json\", json.dumps(model.get_config())),\n                    (\"model\", model),\n                ])).to_frame().T\n                new_manifest_rows.append(row)\n                self.class1_pan_allele_models.append(model)\n                new_model_names.append(model_name)\n\n            for allele in predictor.allele_to_allele_specific_models:\n                if allele not in self.allele_to_allele_specific_models:\n                    self.allele_to_allele_specific_models[allele] = []\n                current_models = self.allele_to_allele_specific_models[allele]\n                for model in predictor.allele_to_allele_specific_models[allele]:\n                    model_name = self.model_name(allele, len(current_models))\n                    row = pandas.Series(collections.OrderedDict([\n                        (\"model_name\", model_name),\n                        
(\"allele\", allele),\n                        (\"config_json\", json.dumps(model.get_config())),\n                        (\"model\", model),\n                    ])).to_frame().T\n                    new_manifest_rows.append(row)\n                    current_models.append(model)\n                    new_model_names.append(model_name)\n\n        self._manifest_df = pandas.concat(\n            [original_manifest] + new_manifest_rows,\n            ignore_index=True)\n\n        self.clear_cache()\n        self.check_consistency()\n        return new_model_names\n\n    def canonicalize_allele_name(self, raw_name):\n        \"\"\"\n        Normalize an allele name and map it to the canonical pseudosequence\n        key if possible.\n\n        Tries without IMGT aliases first so that alleles like HLA-C*01:01\n        (which aliases map to C*01:02) resolve to their own pseudosequence\n        when one exists.\n\n        Parameters\n        ----------\n        raw_name : str\n\n        Returns\n        -------\n        str\n        \"\"\"\n        # Try without aliases first — this matches pseudosequence keys\n        # directly and avoids mhcgnomes alias remapping or Q/N annotations.\n        if self.allele_to_sequence:\n            no_alias = normalize_allele_name(\n                raw_name, raise_on_error=False, use_allele_aliases=False)\n            if no_alias is not None and no_alias in self.allele_to_sequence:\n                return no_alias\n        # Fall back to aliases and map through canonical lookup.\n        normalized = normalize_allele_name(raw_name)\n        return self.allele_to_canonical.get(normalized, normalized)\n\n    @property\n    def supported_alleles(self):\n        \"\"\"\n        Alleles for which predictions can be made.\n\n        Returns\n        -------\n        list of string\n        \"\"\"\n        if 'supported_alleles' not in self._cache:\n            result = set(self.allele_to_allele_specific_models)\n            if self.allele_to_sequence:\n                result = result.union(self.allele_to_sequence)\n            self._cache[\"supported_alleles\"] = sorted(result)\n        return self._cache[\"supported_alleles\"]\n\n    @property\n    def supported_peptide_lengths(self):\n        \"\"\"\n        (minimum, maximum) lengths of peptides supported by *all models*,\n        inclusive.\n\n        Returns\n        -------\n        (int, int) tuple\n\n        \"\"\"\n        if 'supported_peptide_lengths' not in self._cache:\n            length_ranges = set(\n                network.supported_peptide_lengths\n                for network in self.neural_networks)\n            result = (\n                max(lower for (lower, upper) in length_ranges),\n                min(upper for (lower, upper) in length_ranges))\n            self._cache[\"supported_peptide_lengths\"] = result\n        return self._cache[\"supported_peptide_lengths\"]\n\n    def check_consistency(self):\n        \"\"\"\n        Verify that self.manifest_df is consistent with:\n        - self.class1_pan_allele_models\n        - self.allele_to_allele_specific_models\n\n        Currently only checks for agreement on the total number of models.\n\n        Throws AssertionError if inconsistent.\n        \"\"\"\n        num_models = len(self.class1_pan_allele_models) + sum(\n            len(v) for v in self.allele_to_allele_specific_models.values())\n        assert len(self.manifest_df) == num_models, (\n            \"Manifest seems out of sync with models: %d vs %d entries: \"\n            
\"\\n%s\\npan-allele: %s\\nallele-specific: %s\"% (\n                len(self.manifest_df),\n                num_models,\n                str(self.manifest_df),\n                str(self.class1_pan_allele_models),\n                str(self.allele_to_allele_specific_models)))\n\n    def save(self, models_dir, model_names_to_write=None, write_metadata=True):\n        \"\"\"\n        Serialize the predictor to a directory on disk. If the directory does\n        not exist it will be created.\n\n        The serialization format consists of a file called \"manifest.csv\" with\n        the configurations of each Class1NeuralNetwork, along with per-network\n        files giving the model weights. If there are pan-allele predictors in\n        the ensemble, the allele sequences are also stored in the\n        directory. There is also a small file \"index.txt\" with basic metadata:\n        when the models were trained, by whom, on what host.\n\n        Parameters\n        ----------\n        models_dir : string\n            Path to directory. It will be created if it doesn't exist.\n\n        model_names_to_write : list of string, optional\n            Only write the weights for the specified models. Useful for\n            incremental updates during training.\n\n        write_metadata : boolean, optional\n            Whether to write optional metadata\n        \"\"\"\n        self.check_consistency()\n\n        if model_names_to_write is None:\n            # Write all models\n            model_names_to_write = self.manifest_df.model_name.values\n\n        if not exists(models_dir):\n            mkdir(models_dir)\n\n        sub_manifest_df = self.manifest_df.loc[\n            self.manifest_df.model_name.isin(model_names_to_write)\n        ].copy()\n\n        # Network JSON configs may have changed since the models were added,\n        # for example due to changes to the allele representation layer.\n        # So we update the JSON configs here also.\n        updated_network_config_jsons = []\n        for (_, row) in sub_manifest_df.iterrows():\n            updated_network_config_jsons.append(\n                json.dumps(row.model.get_config()))\n            weights_path = self.weights_path(models_dir, row.model_name)\n            save_weights(row.model.get_weights(), weights_path)\n            logging.info(\"Wrote: %s\", weights_path)\n        sub_manifest_df[\"config_json\"] = updated_network_config_jsons\n        self.manifest_df.loc[\n            sub_manifest_df.index,\n            \"config_json\"\n        ] = updated_network_config_jsons\n\n        write_manifest_df = self.manifest_df[[\n            c for c in self.manifest_df.columns if c != \"model\"\n        ]]\n        manifest_path = join(models_dir, \"manifest.csv\")\n        write_manifest_df.to_csv(manifest_path, index=False)\n        logging.info(\"Wrote: %s\", manifest_path)\n\n        if write_metadata:\n            # Write \"info.txt\"\n            info_path = join(models_dir, \"info.txt\")\n            rows = [\n                (\"trained on\", time.asctime()),\n                (\"package   \", \"mhcflurry %s\" % __version__),\n                (\"hostname  \", gethostname()),\n                (\"user      \", getuser()),\n            ]\n            pandas.DataFrame(rows).to_csv(\n                info_path, sep=\"\\t\", header=False, index=False)\n\n            if self.metadata_dataframes:\n                for (name, df) in self.metadata_dataframes.items():\n                    metadata_df_path = join(models_dir, \"%s.csv.bz2\" % 
name)\n                    df.to_csv(metadata_df_path, index=False, compression=\"bz2\")\n\n        # Save allele sequences\n        if self.allele_to_sequence is not None:\n            allele_to_sequence_df = pandas.DataFrame(\n                list(self.allele_to_sequence.items()),\n                columns=['allele', 'sequence']\n            )\n            allele_to_sequence_df.to_csv(\n                join(models_dir, \"allele_sequences.csv\"), index=False)\n            logging.info(\"Wrote: %s\", join(models_dir, \"allele_sequences.csv\"))\n\n        if self.allele_to_percent_rank_transform:\n            percent_ranks_df = None\n            for (allele, transform) in self.allele_to_percent_rank_transform.items():\n                series = transform.to_series()\n                if percent_ranks_df is None:\n                    percent_ranks_df = {}\n                    percent_ranks_df_index = series.index\n                numpy.testing.assert_array_almost_equal(\n                    series.index.values,\n                    percent_ranks_df_index.values)\n                percent_ranks_df[allele] = series.values\n            percent_ranks_df = pandas.DataFrame(\n                percent_ranks_df,\n                index=percent_ranks_df_index)\n            percent_ranks_path = join(models_dir, \"percent_ranks.csv\")\n            percent_ranks_df.to_csv(\n                percent_ranks_path,\n                index=True,\n                index_label=\"bin\")\n            logging.info(\"Wrote: %s\", percent_ranks_path)\n\n        if self.optimization_info:\n            # If the model being saved was optimized, we need to save that\n            # information since it can affect how predictions are performed\n            # (e.g. stitched-together ensembles output concatenated results,\n            # which then need to be averaged outside the model).\n            optimization_info_path = join(models_dir, \"optimization_info.json\")\n            with open(optimization_info_path, \"w\") as fd:\n                json.dump(self.optimization_info, fd, indent=4)\n\n    @staticmethod\n    def load(models_dir=None, max_models=None, optimization_level=None):\n        \"\"\"\n        Deserialize a predictor from a directory on disk.\n\n        Parameters\n        ----------\n        models_dir : string\n            Path to directory. If unspecified the default downloaded models are\n            used.\n\n        max_models : int, optional\n            Maximum number of `Class1NeuralNetwork` instances to load\n\n        optimization_level : int\n            If >0, model optimization will be attempted. 
Defaults to value of\n            environment variable MHCFLURRY_OPTIMIZATION_LEVEL.\n\n        Returns\n        -------\n        `Class1AffinityPredictor` instance\n        \"\"\"\n        if models_dir is None:\n            try:\n                models_dir = get_default_class1_models_dir()\n            except RuntimeError as e:\n                # Fall back to the affinity predictor included in presentation\n                # predictor if possible.\n                from mhcflurry.class1_presentation_predictor import (\n                    Class1PresentationPredictor)\n                try:\n                    presentation_predictor = Class1PresentationPredictor.load()\n                    return presentation_predictor.affinity_predictor\n                except RuntimeError:\n                    raise e\n\n        if optimization_level is None:\n            optimization_level = OPTIMIZATION_LEVEL\n\n        manifest_path = join(models_dir, \"manifest.csv\")\n        manifest_df = pandas.read_csv(manifest_path, nrows=max_models)\n\n        # ----- Load pseudosequences first so we can canonicalize -----\n        allele_to_sequence = None\n        allele_to_canonical = {}\n        if exists(join(models_dir, \"allele_sequences.csv\")):\n            allele_to_sequence = pandas.read_csv(\n                join(models_dir, \"allele_sequences.csv\"),\n                index_col=0).iloc[:, 0].to_dict()\n\n            # Re-normalize allele names. We first try without IMGT allele\n            # aliases to preserve current nomenclature. If the parse fails\n            # or the pseudosequence contains unknown (X) positions, we\n            # retry with aliases — retired allele names like B*44:01 (an\n            # IMGT error reassigned to B*44:02 in 1994) often have\n            # incomplete pseudosequences, and the alias target may have a\n            # complete one. 
If mhcgnomes can't parse either way, keep the\n            # raw name so the pseudosequence remains available.\n            renormalized = {}\n            skipped_non_class1 = []\n            for (name, value) in allele_to_sequence.items():\n                normalized = normalize_allele_name(\n                    name, raise_on_error=False, use_allele_aliases=False)\n                if normalized is None or \"X\" in value:\n                    alias_normalized = normalize_allele_name(\n                        name, raise_on_error=False, use_allele_aliases=True)\n                    if alias_normalized is not None:\n                        normalized = alias_normalized\n                if normalized is None:\n                    # Detect class II, TAP, and pseudogene entries —\n                    # these don't belong in a class I predictor and\n                    # always have incomplete pseudosequences.\n                    gene = name.split(\"*\")[0].split(\"-\")[-1] if \"-\" in name else \"\"\n                    if (\"X\" in value and\n                            any(tag in gene\n                                for tag in (\"DAA\", \"DAB\", \"TAP\", \"PS\"))):\n                        skipped_non_class1.append(name)\n                        continue\n                    normalized = name\n                if normalized in renormalized and name != normalized:\n                    existing = renormalized[normalized]\n                    if value.count(\"X\") < existing.count(\"X\"):\n                        renormalized[normalized] = value\n                    continue\n                renormalized[normalized] = value\n            allele_to_sequence = renormalized\n            if skipped_non_class1:\n                logging.info(\n                    \"Skipped %d non-class-I entries from pseudosequence \"\n                    \"file (class II / TAP / pseudogene with incomplete \"\n                    \"pseudosequences): %s\",\n                    len(skipped_non_class1),\n                    \", \".join(sorted(skipped_non_class1)[:10])\n                    + (\" ...\" if len(skipped_non_class1) > 10 else \"\"))\n\n            # Map mhcgnomes-aliased forms back to pseudosequence keys.\n            # e.g. 
Mamu-A1*007:01 -> Mamu-A*07:01\n            for canonical_name in allele_to_sequence:\n                aliased = normalize_allele_name(\n                    canonical_name, raise_on_error=False,\n                    use_allele_aliases=True)\n                if (aliased is not None and aliased != canonical_name\n                        and aliased not in allele_to_sequence):\n                    allele_to_canonical[aliased] = canonical_name\n\n        def to_canonical(raw_name):\n            \"\"\"Normalize a raw allele name to its canonical pseudosequence key.\"\"\"\n            n = normalize_allele_name(raw_name, raise_on_error=False) or raw_name\n            return allele_to_canonical.get(n, n)\n\n        # ----- Load manifest -----\n        allele_to_allele_specific_models = collections.defaultdict(list)\n        class1_pan_allele_models = []\n        all_models = []\n        for (_, row) in manifest_df.iterrows():\n            weights_filename = Class1AffinityPredictor.weights_path(\n                models_dir, row.model_name)\n            config = json.loads(row.config_json)\n\n            model = Class1NeuralNetwork.from_config(\n                config,\n                weights_loader=partial(load_weights, abspath(weights_filename)))\n            if row.allele == \"pan-class1\":\n                class1_pan_allele_models.append(model)\n            else:\n                allele_to_allele_specific_models[\n                    to_canonical(row.allele)].append(model)\n            all_models.append(model)\n\n        manifest_df[\"model\"] = all_models\n\n        # ----- Load percent ranks -----\n        allele_to_percent_rank_transform = {}\n        percent_ranks_path = join(models_dir, \"percent_ranks.csv\")\n        if exists(percent_ranks_path):\n            percent_ranks_df = pandas.read_csv(percent_ranks_path, index_col=0)\n            for allele in percent_ranks_df.columns:\n                canonical = to_canonical(allele)\n                if (canonical in allele_to_percent_rank_transform and\n                        allele != canonical):\n                    continue\n                allele_to_percent_rank_transform[canonical] = (\n                    PercentRankTransform.from_series(percent_ranks_df[allele]))\n\n        logging.info(\n            \"Loaded %d class1 pan allele predictors, %d allele sequences, \"\n            \"%d percent rank distributions, and %d allele specific models: %s\",\n            len(class1_pan_allele_models),\n            len(allele_to_sequence) if allele_to_sequence else 0,\n            len(allele_to_percent_rank_transform),\n            sum(len(v) for v in allele_to_allele_specific_models.values()),\n            \", \".join(\n                \"%s (%d)\" % (allele, len(v))\n                for (allele, v)\n                in sorted(allele_to_allele_specific_models.items())))\n\n        provenance_string = None\n        try:\n            info_path = join(models_dir, \"info.txt\")\n            info = pandas.read_csv(\n                info_path, sep=\"\\t\", header=None, index_col=0).iloc[\n                :, 0\n            ].to_dict()\n            provenance_string = \"generated on %s\" % info[\"trained on\"]\n        except OSError:\n            pass\n\n        optimization_info = None\n        try:\n            optimization_info_path = join(models_dir, \"optimization_info.json\")\n            with open(optimization_info_path) as fd:\n                optimization_info = json.load(fd)\n        except OSError:\n            pass\n\n        result = 
Class1AffinityPredictor(\n            allele_to_allele_specific_models=allele_to_allele_specific_models,\n            class1_pan_allele_models=class1_pan_allele_models,\n            allele_to_sequence=allele_to_sequence,\n            manifest_df=manifest_df,\n            allele_to_percent_rank_transform=allele_to_percent_rank_transform,\n            provenance_string=provenance_string,\n            optimization_info=optimization_info,\n        )\n        if allele_to_sequence is not None:\n            result.allele_to_canonical = allele_to_canonical\n        if optimization_level >= 1:\n            optimized = result.optimize()\n            logging.info(\n                \"Model optimization %s\",\n                \"succeeded\" if optimized else \"not supported for these models\")\n        return result\n\n    def __repr__(self):\n        pieces = [\"at 0x%0x\" % id(self), \"[mhcflurry %s]\" % __version__]\n\n        pan_models = len(self.class1_pan_allele_models)\n        total_models = len(self.neural_networks)\n        if total_models == 0:\n            pieces.append(\"[empty]\")\n        elif pan_models == total_models:\n            pieces.append(\"[pan]\")\n        elif pan_models == 0:\n            pieces.append(\"[allele-specific]\")\n        else:\n            pieces.append(\"[pan+allele-specific]\")\n\n        if self.provenance_string:\n            pieces.append(self.provenance_string)\n\n        return \"<Class1AffinityPredictor %s>\" % \" \".join(pieces)\n\n    def optimize(self, warn=True):\n        \"\"\"\n        EXPERIMENTAL: Optimize the predictor for faster predictions.\n\n        Currently the only optimization implemented is to merge multiple pan-\n        allele predictors at the PyTorch level.\n\n        The optimization is performed in-place, mutating the instance.\n\n        Returns\n        ----------\n        bool\n            Whether optimization was performed\n\n        \"\"\"\n        num_class1_pan_allele_models = len(self.class1_pan_allele_models)\n        if num_class1_pan_allele_models > 1:\n            provenance_string = self.provenance_string\n            try:\n                self.class1_pan_allele_models = [\n                    Class1NeuralNetwork.merge(\n                        self.class1_pan_allele_models,\n                        merge_method=\"concatenate\")\n                ]\n            except NotImplementedError as e:\n                if warn:\n                    logging.warning(\"Optimization failed: %s\", str(e))\n                return False\n            self._manifest_df = None\n            self.clear_cache()\n            self.optimization_info[\"pan_models_merged\"] = True\n            self.optimization_info[\"num_pan_models_merged\"] = (\n                num_class1_pan_allele_models)\n            self.provenance_string = provenance_string\n        else:\n            return False\n        return True\n\n    @staticmethod\n    def model_name(allele, num):\n        \"\"\"\n        Generate a model name\n\n        Parameters\n        ----------\n        allele : string\n        num : int\n\n        Returns\n        -------\n        string\n\n        \"\"\"\n        random_string = hashlib.sha1(\n            str(time.time()).encode()).hexdigest()[:16]\n        return \"%s-%d-%s\" % (\n            allele.upper().replace(\"*\", \"_\").replace(\":\", \"_\"),\n            num,\n            random_string)\n\n    @staticmethod\n    def weights_path(models_dir, model_name):\n        \"\"\"\n        Generate the path to the weights file for a 
model\n\n        Parameters\n        ----------\n        models_dir : string\n        model_name : string\n\n        Returns\n        -------\n        string\n        \"\"\"\n        return join(models_dir, \"weights_%s.npz\" % model_name)\n\n    @property\n    def master_allele_encoding(self):\n        \"\"\"\n        An AlleleEncoding containing the universe of alleles specified by\n        self.allele_to_sequence.\n\n        Returns\n        -------\n        AlleleEncoding\n        \"\"\"\n        if (self._master_allele_encoding is None or\n                self._master_allele_encoding.allele_to_sequence !=\n                self.allele_to_sequence):\n            self._master_allele_encoding = AlleleEncoding(\n                allele_to_sequence=self.allele_to_sequence)\n        return self._master_allele_encoding\n\n    def fit_allele_specific_predictors(\n            self,\n            n_models,\n            architecture_hyperparameters_list,\n            allele,\n            peptides,\n            affinities,\n            inequalities=None,\n            train_rounds=None,\n            models_dir_for_save=None,\n            verbose=0,\n            progress_preamble=\"\",\n            progress_print_interval=5.0):\n        \"\"\"\n        Fit one or more allele specific predictors for a single allele using one\n        or more neural network architectures.\n\n        The new predictors are saved in the Class1AffinityPredictor instance\n        and will be used on subsequent calls to `predict`.\n\n        Parameters\n        ----------\n        n_models : int\n            Number of neural networks to fit\n\n        architecture_hyperparameters_list : list of dict\n            List of hyperparameter sets.\n\n        allele : string\n\n        peptides : `EncodableSequences` or list of string\n\n        affinities : list of float\n            nM affinities\n\n        inequalities : list of string, each element one of \">\", \"<\", or \"=\"\n            See `Class1NeuralNetwork.fit` for details.\n\n        train_rounds : sequence of int\n            Each training point i will be used on training rounds r for which\n            train_rounds[i] > r, r >= 0.\n\n        models_dir_for_save : string, optional\n            If specified, the Class1AffinityPredictor is (incrementally) written\n            to the given models dir after each neural network is fit.\n\n        verbose : int\n            Verbosity level for training output\n\n        progress_preamble : string\n            Optional string of information to include in each progress update\n\n        progress_print_interval : float\n            How often (in seconds) to print progress. 
Set to None to disable.\n\n        Returns\n        -------\n        list of `Class1NeuralNetwork`\n        \"\"\"\n\n        allele = normalize_allele_name(allele)\n        if allele not in self.allele_to_allele_specific_models:\n            self.allele_to_allele_specific_models[allele] = []\n\n        encodable_peptides = EncodableSequences.create(peptides)\n        peptides_affinities_inequalities_per_round = [\n            (encodable_peptides, affinities, inequalities)\n        ]\n\n        if train_rounds is not None:\n            for round in sorted(set(train_rounds)):\n                round_mask = train_rounds > round\n                if round_mask.any():\n                    sub_encodable_peptides = EncodableSequences.create(\n                        encodable_peptides.sequences[round_mask])\n                    peptides_affinities_inequalities_per_round.append((\n                        sub_encodable_peptides,\n                        affinities[round_mask],\n                        None if inequalities is None else inequalities[round_mask]))\n        n_rounds = len(peptides_affinities_inequalities_per_round)\n\n        n_architectures = len(architecture_hyperparameters_list)\n\n        # Adjust progress info to indicate number of models and\n        # architectures.\n        pieces = []\n        if n_models > 1:\n            pieces.append(\"Model {model_num:2d} / {n_models:2d}\")\n        if n_architectures > 1:\n            pieces.append(\n                \"Architecture {architecture_num:2d} / {n_architectures:2d}\")\n        if len(peptides_affinities_inequalities_per_round) > 1:\n            pieces.append(\"Round {round:2d} / {n_rounds:2d}\")\n        pieces.append(\"{n_peptides:4d} peptides\")\n        progress_preamble_template = \"[ %s ] {user_progress_preamble}\" % (\n            \", \".join(pieces))\n\n        models = []\n        for model_num in range(n_models):\n            for (architecture_num, architecture_hyperparameters) in enumerate(\n                    architecture_hyperparameters_list):\n                model = Class1NeuralNetwork(**architecture_hyperparameters)\n                for round_num in range(n_rounds):\n                    (round_peptides, round_affinities, round_inequalities) = (\n                        peptides_affinities_inequalities_per_round[round_num]\n                    )\n                    model.fit(\n                        round_peptides,\n                        round_affinities,\n                        inequalities=round_inequalities,\n                        verbose=verbose,\n                        progress_preamble=progress_preamble_template.format(\n                            n_peptides=len(round_peptides),\n                            round=round_num,\n                            n_rounds=n_rounds,\n                            user_progress_preamble=progress_preamble,\n                            model_num=model_num + 1,\n                            n_models=n_models,\n                            architecture_num=architecture_num + 1,\n                            n_architectures=n_architectures),\n                        progress_print_interval=progress_print_interval)\n\n                model_name = self.model_name(allele, model_num)\n                row = pandas.Series(collections.OrderedDict([\n                    (\"model_name\", model_name),\n                    (\"allele\", allele),\n                    (\"config_json\", json.dumps(model.get_config())),\n                    (\"model\", model),\n                
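    # The \"model\" column holds the live network object; save() excludes it\n                    # from manifest.csv and writes each network's weights to its own .npz file.\n                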
])).to_frame().T\n                self._manifest_df = pandas.concat(\n                    [self.manifest_df, row], ignore_index=True)\n                self.allele_to_allele_specific_models[allele].append(model)\n                if models_dir_for_save:\n                    self.save(\n                        models_dir_for_save, model_names_to_write=[model_name])\n                models.append(model)\n\n        self.clear_cache()\n        return models\n\n    def fit_class1_pan_allele_models(\n            self,\n            n_models,\n            architecture_hyperparameters,\n            alleles,\n            peptides,\n            affinities,\n            inequalities,\n            models_dir_for_save=None,\n            verbose=1,\n            progress_preamble=\"\",\n            progress_print_interval=5.0):\n        \"\"\"\n        Fit one or more pan-allele predictors using a single neural network\n        architecture.\n\n        The new predictors are saved in the Class1AffinityPredictor instance\n        and will be used on subsequent calls to `predict`.\n\n        Parameters\n        ----------\n        n_models : int\n            Number of neural networks to fit\n\n        architecture_hyperparameters : dict\n\n        alleles : list of string\n            Allele names (not sequences) corresponding to each peptide\n\n        peptides : `EncodableSequences` or list of string\n\n        affinities : list of float\n            nM affinities\n\n        inequalities : list of string, each element one of \">\", \"<\", or \"=\"\n            See Class1NeuralNetwork.fit for details.\n\n        models_dir_for_save : string, optional\n            If specified, the Class1AffinityPredictor is (incrementally) written\n            to the given models dir after each neural network is fit.\n\n        verbose : int\n            Verbosity level for training output\n\n        progress_preamble : string\n            Optional string of information to include in each progress update\n\n        progress_print_interval : float\n            How often (in seconds) to print progress. 
Set to None to disable.\n\n        Returns\n        -------\n        list of `Class1NeuralNetwork`\n        \"\"\"\n\n        alleles = pandas.Series(alleles).map(normalize_allele_name)\n        allele_encoding = AlleleEncoding(\n            alleles,\n            borrow_from=self.master_allele_encoding)\n\n        encodable_peptides = EncodableSequences.create(peptides)\n        models = []\n        for i in range(n_models):\n            logging.info(\"Training model %d / %d\", i + 1, n_models)\n            model = Class1NeuralNetwork(**architecture_hyperparameters)\n            model.fit(\n                encodable_peptides,\n                affinities,\n                inequalities=inequalities,\n                allele_encoding=allele_encoding,\n                verbose=verbose,\n                progress_preamble=progress_preamble,\n                progress_print_interval=progress_print_interval)\n\n            model_name = self.model_name(\"pan-class1\", i)\n            row = pandas.Series(collections.OrderedDict([\n                (\"model_name\", model_name),\n                (\"allele\", \"pan-class1\"),\n                (\"config_json\", json.dumps(model.get_config())),\n                (\"model\", model),\n            ])).to_frame().T\n            self._manifest_df = pandas.concat(\n                [self.manifest_df, row], ignore_index=True)\n            self.class1_pan_allele_models.append(model)\n            if models_dir_for_save:\n                self.save(\n                    models_dir_for_save, model_names_to_write=[model_name])\n            models.append(model)\n\n        self.clear_cache()\n        return models\n\n    def add_pan_allele_model(self, model, models_dir_for_save=None):\n        \"\"\"\n        Add a pan-allele model to the ensemble and optionally do an incremental\n        save.\n\n        Parameters\n        ----------\n        model : Class1NeuralNetwork\n        models_dir_for_save : string\n            Directory to save resulting ensemble to\n        \"\"\"\n        model_name = self.model_name(\"pan-class1\", 1)\n        row = pandas.Series(collections.OrderedDict([\n            (\"model_name\", model_name),\n            (\"allele\", \"pan-class1\"),\n            (\"config_json\", json.dumps(model.get_config())),\n            (\"model\", model),\n        ])).to_frame().T\n        self._manifest_df = pandas.concat(\n            [self.manifest_df, row], ignore_index=True)\n        self.class1_pan_allele_models.append(model)\n        self.clear_cache()\n        self.check_consistency()\n        if models_dir_for_save:\n            self.save(\n                models_dir_for_save, model_names_to_write=[model_name])\n\n    def percentile_ranks(self, affinities, allele=None, alleles=None, throw=True):\n        \"\"\"\n        Return percentile ranks for the given ic50 affinities and alleles.\n\n        The 'allele' and 'alleles' argument are as in the `predict` method.\n        Specify one of these.\n\n        Parameters\n        ----------\n        affinities : sequence of float\n            nM affinities\n        allele : string\n        alleles : sequence of string\n        throw : boolean\n            If True, a ValueError will be raised in the case of unsupported\n            alleles. 
If False, a warning will be logged and NaN will be returned\n            for those percentile ranks.\n\n        Returns\n        -------\n        numpy.array of float\n        \"\"\"\n        if allele is not None:\n            normalized_allele = self.canonicalize_allele_name(allele)\n            try:\n                transform = self.allele_to_percent_rank_transform[normalized_allele]\n                return transform.transform(affinities)\n            except KeyError:\n                if self.allele_to_sequence:\n                    # See if we have information for an equivalent allele (one\n                    # with the same pseudosequence). Use .get() since the\n                    # allele may have no sequence at all, in which case we\n                    # fall through to the error message below.\n                    sequence = self.allele_to_sequence.get(normalized_allele)\n                    other_alleles = [\n                        other_allele for (other_allele, other_sequence)\n                        in self.allele_to_sequence.items()\n                        if other_sequence == sequence\n                    ]\n                    for other_allele in other_alleles:\n                        if other_allele in self.allele_to_percent_rank_transform:\n                            transform = self.allele_to_percent_rank_transform[\n                                other_allele]\n                            return transform.transform(affinities)\n\n                msg = \"Allele %s has no percentile rank information\" % (\n                    allele + (\n                        \"\" if allele == normalized_allele\n                        else \" (normalized to %s)\" % normalized_allele))\n                if throw:\n                    raise ValueError(msg)\n                warnings.warn(msg)\n                return numpy.ones(len(affinities)) * numpy.nan  # Return NaNs\n\n        if alleles is None:\n            raise ValueError(\"Specify allele or alleles\")\n\n        df = pandas.DataFrame({\"affinity\": affinities})\n        df[\"allele\"] = alleles\n        df[\"result\"] = numpy.nan\n        for (allele, sub_df) in df.groupby(\"allele\"):\n            df.loc[sub_df.index, \"result\"] = self.percentile_ranks(\n                sub_df.affinity, allele=allele, throw=throw)\n        return df.result.values\n\n    def predict(\n            self,\n            peptides,\n            alleles=None,\n            allele=None,\n            throw=True,\n            centrality_measure=DEFAULT_CENTRALITY_MEASURE,\n            model_kwargs={}):\n        \"\"\"\n        Predict nM binding affinities.\n\n        If multiple predictors are available for an allele, the predictions are\n        the geometric means of the individual model (nM) predictions.\n\n        One of 'allele' or 'alleles' must be specified. If 'allele' is specified\n        all predictions will be for the given allele. If 'alleles' is specified\n        it must be the same length as 'peptides' and give the allele\n        corresponding to each peptide.\n\n        Parameters\n        ----------\n        peptides : `EncodableSequences` or list of string\n        alleles : list of string\n        allele : string\n        throw : boolean\n            If True, a ValueError will be raised in the case of unsupported\n            alleles or peptide lengths. If False, a warning will be logged and\n            the predictions for the unsupported alleles or peptides will be NaN.\n        centrality_measure : string or callable\n            Measure of central tendency to use to combine predictions in the\n            ensemble. 
Options include: mean, median, robust_mean.\n        model_kwargs : dict\n            Additional keyword arguments to pass to Class1NeuralNetwork.predict\n\n        Returns\n        -------\n        numpy.array of predictions\n        \"\"\"\n        df = self.predict_to_dataframe(\n            peptides=peptides,\n            alleles=alleles,\n            allele=allele,\n            throw=throw,\n            include_percentile_ranks=False,\n            include_confidence_intervals=False,\n            centrality_measure=centrality_measure,\n            model_kwargs=model_kwargs\n        )\n        return df.prediction.values\n\n    def predict_to_dataframe(\n            self,\n            peptides,\n            alleles=None,\n            allele=None,\n            throw=True,\n            include_individual_model_predictions=False,\n            include_percentile_ranks=True,\n            include_confidence_intervals=True,\n            centrality_measure=DEFAULT_CENTRALITY_MEASURE,\n            model_kwargs={}):\n        \"\"\"\n        Predict nM binding affinities. Gives more detailed output than `predict`\n        method, including 5-95% prediction intervals.\n\n        If multiple predictors are available for an allele, the predictions are\n        the geometric means of the individual model predictions.\n\n        One of 'allele' or 'alleles' must be specified. If 'allele' is specified\n        all predictions will be for the given allele. If 'alleles' is specified\n        it must be the same length as 'peptides' and give the allele\n        corresponding to each peptide.\n\n        Parameters\n        ----------\n        peptides : `EncodableSequences` or list of string\n        alleles : list of string\n        allele : string\n        throw : boolean\n            If True, a ValueError will be raised in the case of unsupported\n            alleles or peptide lengths. If False, a warning will be logged and\n            the predictions for the unsupported alleles or peptides will be NaN.\n        include_individual_model_predictions : boolean\n            If True, the predictions of each individual model are included as\n            columns in the result DataFrame.\n        include_percentile_ranks : boolean, default True\n            If True, a \"prediction_percentile\" column will be included giving\n            the percentile ranks. If no percentile rank info is available,\n            this will be ignored with a warning.\n        centrality_measure : string or callable\n            Measure of central tendency to use to combine predictions in the\n            ensemble. 
Options include: mean, median, robust_mean.\n        model_kwargs : dict\n            Additional keyword arguments to pass to Class1NeuralNetwork.predict\n\n        Returns\n        -------\n        `pandas.DataFrame` of predictions\n        \"\"\"\n        if isinstance(peptides, str):\n            raise TypeError(\"peptides must be a list or array, not a string\")\n        if isinstance(alleles, str):\n            raise TypeError(\"alleles must be a list or array, not a string\")\n        if allele is None and alleles is None:\n            raise ValueError(\"Must specify 'allele' or 'alleles'.\")\n\n        peptides = EncodableSequences.create(peptides)\n        df = pandas.DataFrame({\n            'peptide': peptides.sequences\n        }, copy=False)\n\n        if allele is not None:\n            if alleles is not None:\n                raise ValueError(\"Specify exactly one of allele or alleles\")\n            normalized_allele = self.canonicalize_allele_name(allele)\n            df[\"allele\"] = normalized_allele\n            df[\"normalized_allele\"] = normalized_allele\n            unique_alleles = [normalized_allele]\n        else:\n            df[\"allele\"] = [\n                self.canonicalize_allele_name(a) for a in alleles]\n            df[\"normalized_allele\"] = df[\"allele\"]\n            unique_alleles = df.normalized_allele.unique()\n\n        if len(df) == 0:\n            # No predictions.\n            logging.warning(\"Predicting for 0 peptides.\")\n            empty_result = pandas.DataFrame(\n                columns=[\n                    'peptide',\n                    'allele',\n                    'prediction',\n                    'prediction_low',\n                    'prediction_high'\n                ])\n            return empty_result\n\n        (min_peptide_length, max_peptide_length) = (\n            self.supported_peptide_lengths)\n\n        if (peptides.min_length < min_peptide_length or\n                peptides.max_length > max_peptide_length):\n            # Only compute this if needed\n            all_peptides_supported = False\n            sequence_length = df.peptide.str.len()\n            df[\"supported_peptide\"] = (\n                (sequence_length >= min_peptide_length) &\n                (sequence_length <= max_peptide_length))\n            if (~df.supported_peptide).any():\n                msg = (\n                    \"%d peptides have lengths outside of supported range [%d, %d]: \"\n                    \"%s\" % (\n                        (~df.supported_peptide).sum(),\n                        min_peptide_length,\n                        max_peptide_length,\n                        str(df.loc[~df.supported_peptide].peptide.unique())))\n                logging.warning(msg)\n                if throw:\n                    raise ValueError(msg)\n        else:\n            # Handle common case efficiently.\n            df[\"supported_peptide\"] = True\n            all_peptides_supported = True\n\n        peptide_has_valid_amino_acids = (\n            (~df.supported_peptide) |\n            df.peptide.str.upper().str.match(\"^[ACDEFGHIKLMNPQRSTVWY]+$\"))\n        df[\"supported_peptide\"] = (\n                df[\"supported_peptide\"] & peptide_has_valid_amino_acids)\n\n        if (~peptide_has_valid_amino_acids).any():\n            all_peptides_supported = False\n            msg = (\n                \"%d peptides have nonstandard amino acids: \"\n                \"%s\" % (\n                    (~peptide_has_valid_amino_acids).sum(),\n              
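  # include the unique offending sequences in the error message\n              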
      str(df.loc[~peptide_has_valid_amino_acids].peptide.unique())))\n            logging.warning(msg)\n            if throw:\n                raise ValueError(msg)\n\n        num_pan_models = (\n            len(self.class1_pan_allele_models)\n            if not self.optimization_info.get(\"pan_models_merged\", False)\n            else self.optimization_info[\"num_pan_models_merged\"])\n        max_single_allele_models = max(\n            len(self.allele_to_allele_specific_models.get(allele, []))\n            for allele in unique_alleles\n        )\n        predictions_array = numpy.zeros(\n            shape=(df.shape[0], num_pan_models + max_single_allele_models),\n            dtype=\"float64\")\n        predictions_array[:] = numpy.nan\n\n        if self.class1_pan_allele_models:\n            master_allele_encoding = self.master_allele_encoding\n            unsupported_alleles = [\n                allele for allele in\n                df.normalized_allele.unique()\n                if allele not in self.allele_to_sequence\n            ]\n            if unsupported_alleles:\n                truncate_at = 100\n                allele_string = \" \".join(\n                    sorted(self.allele_to_sequence)[:truncate_at])\n                if len(self.allele_to_sequence) > truncate_at:\n                    allele_string += \" + %d more alleles\" % (\n                        len(self.allele_to_sequence) - truncate_at)\n                msg = (\n                    \"No sequences for allele(s): %s.\\n\"\n                    \"Supported alleles: %s\" % (\n                        \" \".join(unsupported_alleles), allele_string))\n                logging.warning(msg)\n                if throw:\n                    raise ValueError(msg)\n            mask = df.supported_peptide & (\n                ~df.normalized_allele.isin(unsupported_alleles))\n\n            row_slice = None\n            if mask is None or mask.all():\n                row_slice = slice(None, None, None)  # all rows\n                masked_allele_encoding = AlleleEncoding(\n                    df.normalized_allele,\n                    borrow_from=master_allele_encoding)\n                masked_peptides = EncodableSequences.create(peptides)\n            elif mask.sum() > 0:\n                row_slice = mask\n                masked_allele_encoding = AlleleEncoding(\n                    df.loc[mask].normalized_allele,\n                    borrow_from=master_allele_encoding)\n                masked_peptides = EncodableSequences.create(\n                    peptides.sequences[mask])\n\n            if row_slice is not None:\n                # The following line is a performance optimization that may be\n                # revisited. It causes the neural network to set to include\n                # only the alleles actually being predicted for. This makes\n                # the network much smaller. 
However, subsequent calls to\n                # predict will need to reset these weights, so there is a\n                # tradeoff.\n                masked_allele_encoding = masked_allele_encoding.compact()\n\n                if self.optimization_info.get(\"pan_models_merged\"):\n                    # Multiple pan-allele models have been merged into one\n                    # at the PyTorch level.\n                    assert len(self.class1_pan_allele_models) == 1\n                    predictions = self.class1_pan_allele_models[0].predict(\n                        masked_peptides,\n                        allele_encoding=masked_allele_encoding,\n                        output_index=None,\n                        **model_kwargs)\n                    predictions_array[row_slice, :num_pan_models] = predictions\n                else:\n                    for (i, model) in enumerate(self.class1_pan_allele_models):\n                        predictions_array[row_slice, i] = model.predict(\n                            masked_peptides,\n                            allele_encoding=masked_allele_encoding,\n                            **model_kwargs)\n\n        if self.allele_to_allele_specific_models:\n            unsupported_alleles = [\n                allele for allele in unique_alleles\n                if not self.allele_to_allele_specific_models.get(allele)\n            ]\n            if unsupported_alleles:\n                msg = (\n                    \"No single-allele models for allele(s): %s.\\n\"\n                    \"Supported alleles are: %s\" % (\n                        \" \".join(unsupported_alleles),\n                        \" \".join(sorted(self.allele_to_allele_specific_models))))\n                logging.warning(msg)\n                if throw:\n                    raise ValueError(msg)\n\n            for allele in unique_alleles:\n                models = self.allele_to_allele_specific_models.get(allele, [])\n                if len(unique_alleles) == 1 and all_peptides_supported:\n                    mask = None\n                else:\n                    mask = (\n                        (df.normalized_allele == allele) &\n                        df.supported_peptide).values\n\n                row_slice = None\n                if mask is None or mask.all():\n                    peptides_for_allele = peptides\n                    row_slice = slice(None, None, None)\n                elif mask.sum() > 0:\n                    peptides_for_allele = EncodableSequences.create(\n                        df.loc[mask].peptide.values)\n                    row_slice = mask\n\n                if row_slice is not None:\n                    for (i, model) in enumerate(models):\n                        predictions_array[\n                            row_slice,\n                            num_pan_models + i,\n                        ] = model.predict(peptides_for_allele, **model_kwargs)\n\n        if callable(centrality_measure):\n            centrality_function = centrality_measure\n        else:\n            centrality_function = CENTRALITY_MEASURES[centrality_measure]\n\n        logs = numpy.log(predictions_array)\n        row_has_predictions = (~numpy.isnan(logs)).any(axis=1)\n        log_centers = numpy.full(df.shape[0], numpy.nan, dtype=\"float64\")\n        if row_has_predictions.any():\n            log_centers[row_has_predictions] = centrality_function(\n                logs[row_has_predictions]\n            )\n        df[\"prediction\"] = numpy.exp(log_centers)\n\n        if 
include_confidence_intervals:\n            prediction_low = numpy.full(df.shape[0], numpy.nan, dtype=\"float64\")\n            prediction_high = numpy.full(df.shape[0], numpy.nan, dtype=\"float64\")\n            if row_has_predictions.any():\n                prediction_low[row_has_predictions] = numpy.exp(\n                    numpy.nanpercentile(logs[row_has_predictions], 5.0, axis=1)\n                )\n                prediction_high[row_has_predictions] = numpy.exp(\n                    numpy.nanpercentile(logs[row_has_predictions], 95.0, axis=1)\n                )\n            df[\"prediction_low\"] = prediction_low\n            df[\"prediction_high\"] = prediction_high\n\n        if include_individual_model_predictions:\n            for i in range(num_pan_models):\n                df[\"model_pan_%d\" % i] = predictions_array[:, i]\n\n            for i in range(max_single_allele_models):\n                df[\"model_single_%d\" % i] = predictions_array[\n                    :, num_pan_models + i\n                ]\n\n        if include_percentile_ranks:\n            if self.allele_to_percent_rank_transform:\n                df[\"prediction_percentile\"] = self.percentile_ranks(\n                    df.prediction,\n                    alleles=df.normalized_allele.values,\n                    throw=throw)\n            else:\n                warnings.warn(\"No percentile rank information available.\")\n\n        del df[\"supported_peptide\"]\n        del df[\"normalized_allele\"]\n        return df\n\n    def calibrate_percentile_ranks(\n            self,\n            peptides=None,\n            num_peptides_per_length=int(1e5),\n            alleles=None,\n            bins=None,\n            motif_summary=False,\n            summary_top_peptide_fractions=[0.001],\n            verbose=False,\n            model_kwargs={}):\n        \"\"\"\n        Compute the cumulative distribution of ic50 values for a set of alleles\n        over a large universe of random peptides, to enable taking quantiles\n        of this distribution later.\n\n        Parameters\n        ----------\n        peptides : sequence of string or EncodableSequences, optional\n            Peptides to use\n        num_peptides_per_length : int, optional\n            If peptides argument is not specified, then num_peptides_per_length\n            peptides are randomly sampled from a uniform distribution for each\n            supported length\n        alleles : sequence of string, optional\n            Alleles to perform calibration for. If not specified all supported\n            alleles will be calibrated.\n        bins : object\n            Anything that can be passed to numpy.histogram's \"bins\" argument\n            can be used here, i.e. either an integer or a sequence giving bin\n            edges. 
This is in ic50 space.\n        motif_summary : bool\n            If True, the length distribution and per-position amino acid\n            frequencies are also calculated for the top x fraction of tightest-\n            binding peptides, where each value of x is given in the\n            summary_top_peptide_fractions list.\n        summary_top_peptide_fractions : list of float\n            Only used if motif_summary is True\n        verbose : boolean\n            Whether to print status updates to stdout\n        model_kwargs : dict\n            Additional low-level Class1NeuralNetwork.predict() kwargs.\n\n        Returns\n        ----------\n        dict of string -> pandas.DataFrame\n\n        If motif_summary is True, this will have keys  \"frequency_matrices\" and\n        \"length_distributions\". Otherwise it will be empty.\n\n        \"\"\"\n        if bins is None:\n            bins = to_ic50(numpy.linspace(1, 0, 1000))\n\n        if alleles is None:\n            alleles = self.supported_alleles\n\n        if peptides is None:\n            peptides = []\n            lengths = range(\n                self.supported_peptide_lengths[0],\n                self.supported_peptide_lengths[1] + 1)\n            for length in lengths:\n                peptides.extend(\n                    random_peptides(num_peptides_per_length, length))\n\n        encoded_peptides = EncodableSequences.create(peptides)\n\n        if motif_summary:\n            frequency_matrices = []\n            length_distributions = []\n        else:\n            frequency_matrices = None\n            length_distributions = None\n        for allele in alleles:\n            start = time.time()\n            predictions = self.predict(\n                encoded_peptides, allele=allele, model_kwargs=model_kwargs)\n            if verbose:\n                elapsed = time.time() - start\n                print(\n                    \"Generated %d predictions for allele %s in %0.2f sec: \"\n                    \"%0.2f predictions / sec\" % (\n                        len(encoded_peptides.sequences),\n                        allele,\n                        elapsed,\n                        len(encoded_peptides.sequences) / elapsed))\n            transform = PercentRankTransform()\n            transform.fit(predictions, bins=bins)\n            self.allele_to_percent_rank_transform[allele] = transform\n\n            if frequency_matrices is not None:\n                predictions_df = pandas.DataFrame({\n                    'peptide': encoded_peptides.sequences,\n                    'prediction': predictions\n                }).drop_duplicates('peptide').set_index(\"peptide\")\n                predictions_df[\"length\"] = predictions_df.index.str.len()\n                for (length, sub_df) in predictions_df.groupby(\"length\"):\n                    for cutoff_fraction in summary_top_peptide_fractions:\n                        selected = sub_df.prediction.nsmallest(\n                            max(\n                                int(len(sub_df) * cutoff_fraction),\n                                1)).index.values\n                        matrix = positional_frequency_matrix(selected).reset_index()\n                        original_columns = list(matrix.columns)\n                        matrix[\"allele\"] = allele\n                        matrix[\"length\"] = length\n                        matrix[\"cutoff_fraction\"] = cutoff_fraction\n                        matrix[\"cutoff_count\"] = len(selected)\n                        matrix = 
matrix[\n                            [\"allele\", \"length\", \"cutoff_fraction\", \"cutoff_count\"]\n                            + original_columns\n                        ]\n                        frequency_matrices.append(matrix)\n\n                # Length distribution\n                for cutoff_fraction in summary_top_peptide_fractions:\n                    cutoff_count = max(\n                        int(len(predictions_df) * cutoff_fraction), 1)\n                    length_distribution = predictions_df.prediction.nsmallest(\n                        cutoff_count).index.str.len().value_counts()\n                    length_distribution.index.name = \"length\"\n                    length_distribution /= length_distribution.sum()\n                    length_distribution = length_distribution.to_frame()\n                    length_distribution.columns = [\"fraction\"]\n                    length_distribution = length_distribution.reset_index()\n                    length_distribution[\"allele\"] = allele\n                    length_distribution[\"cutoff_fraction\"] = cutoff_fraction\n                    length_distribution[\"cutoff_count\"] = cutoff_count\n                    length_distribution = length_distribution[[\n                        \"allele\",\n                        \"cutoff_fraction\",\n                        \"cutoff_count\",\n                        \"length\",\n                        \"fraction\"\n                    ]].sort_values([\"cutoff_fraction\", \"length\"])\n                    length_distributions.append(length_distribution)\n\n        if frequency_matrices is not None:\n            frequency_matrices = pandas.concat(\n                frequency_matrices, ignore_index=True)\n\n        if length_distributions is not None:\n            length_distributions = pandas.concat(\n                length_distributions, ignore_index=True)\n\n        if motif_summary:\n            return {\n                'frequency_matrices': frequency_matrices,\n                'length_distributions': length_distributions,\n            }\n        return {}\n\n    def model_select(\n            self,\n            score_function,\n            alleles=None,\n            min_models=1,\n            max_models=10000):\n        \"\"\"\n        Perform model selection using a user-specified scoring function.\n\n        This works only with allele-specific models, not pan-allele models.\n\n        Model selection is done using a \"step up\" variable selection procedure,\n        in which models are repeatedly added to an ensemble until the score\n        stops improving.\n\n        Parameters\n        ----------\n        score_function : Class1AffinityPredictor -> float function\n            Scoring function\n\n        alleles : list of string, optional\n            If not specified, model selection is performed for all alleles.\n\n        min_models : int, optional\n            Min models to select per allele\n\n        max_models : int, optional\n            Max models to select per allele\n\n        Returns\n        -------\n        Class1AffinityPredictor : predictor containing the selected models\n        \"\"\"\n\n        if alleles is None:\n            alleles = self.supported_alleles\n\n        dfs = []\n        allele_to_allele_specific_models = {}\n        for allele in alleles:\n            df = pandas.DataFrame({\n                'model': self.allele_to_allele_specific_models[allele]\n            })\n            df[\"model_num\"] = df.index\n            df[\"allele\"] = allele\n 
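           # Step-up selection: each round scores every unselected model\n            # together with the already-selected ensemble and keeps the best,\n            # stopping once the score no longer improves.\n 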
           df[\"selected\"] = False\n\n            round_num = 1\n\n            while not df.selected.all() and sum(df.selected) < max_models:\n                score_col = \"score_%2d\" % round_num\n                prev_score_col = \"score_%2d\" % (round_num - 1)\n\n                existing_selected = list(df[df.selected].model)\n                df[score_col] = [\n                    numpy.nan if row.selected else\n                    score_function(\n                        Class1AffinityPredictor(\n                            allele_to_allele_specific_models={\n                                allele: [row.model] + existing_selected\n                            }\n                        )\n                    )\n                    for (_, row) in df.iterrows()\n                ]\n\n                if round_num > min_models and (\n                        df[score_col].max() < df[prev_score_col].max()):\n                    break\n\n                # In case of a tie, pick a model at random.\n                (best_model_index,) = df.loc[\n                    (df[score_col] == df[score_col].max())\n                ].sample(1).index\n                df.loc[best_model_index, \"selected\"] = True\n                round_num += 1\n\n            dfs.append(df)\n            allele_to_allele_specific_models[allele] = list(\n                df.loc[df.selected].model)\n\n        df = pandas.concat(dfs, ignore_index=True)\n\n        new_predictor = Class1AffinityPredictor(\n            allele_to_allele_specific_models,\n            metadata_dataframes={\n                \"model_selection\": df,\n            })\n        return new_predictor\n"
  },
  {
    "path": "mhcflurry/class1_neural_network.py",
    "content": "\"\"\"\nClass1NeuralNetwork - PyTorch implementation for MHC class I binding prediction.\n\"\"\"\n\nimport gc\nimport time\nimport collections\nimport json\nimport weakref\nimport itertools\nimport os\nimport logging\n\nimport numpy\nimport pandas\nimport torch\nimport torch.nn as nn\n\nfrom .hyperparameters import HyperparameterDefaults\nfrom .encodable_sequences import EncodableSequences, EncodingError\nfrom .allele_encoding import AlleleEncoding\nfrom .regression_target import to_ic50, from_ic50\nfrom .common import get_pytorch_device\nfrom .pytorch_layers import LocallyConnected1D, get_activation\nfrom .pytorch_losses import get_pytorch_loss\nfrom .data_dependent_weights_initialization import lsuv_init\nfrom .random_negative_peptides import RandomNegativePeptides\n\n\nDEFAULT_PREDICT_BATCH_SIZE = 4096\nif os.environ.get(\"MHCFLURRY_DEFAULT_PREDICT_BATCH_SIZE\"):\n    DEFAULT_PREDICT_BATCH_SIZE = int(os.environ[\"MHCFLURRY_DEFAULT_PREDICT_BATCH_SIZE\"])\n    logging.info(\n        \"Configured default predict batch size: %d\" % DEFAULT_PREDICT_BATCH_SIZE\n    )\n\n\nKERAS_BATCH_NORM_EPSILON = 1e-3\n# Keras uses moving = moving * 0.99 + batch * 0.01. PyTorch's momentum is the\n# new-batch coefficient, so the equivalent value is 0.01.\nKERAS_BATCH_NORM_MOMENTUM = 0.01\n\n\nclass Class1NeuralNetworkModel(nn.Module):\n    \"\"\"\n    PyTorch module for Class1 neural network.\n    \"\"\"\n\n    def __init__(\n            self,\n            peptide_encoding_shape,\n            allele_representations=None,\n            locally_connected_layers=None,\n            peptide_dense_layer_sizes=None,\n            allele_dense_layer_sizes=None,\n            layer_sizes=None,\n            peptide_allele_merge_method=\"multiply\",\n            peptide_allele_merge_activation=\"\",\n            activation=\"tanh\",\n            output_activation=\"sigmoid\",\n            dropout_probability=0.0,\n            batch_normalization=False,\n            dense_layer_l1_regularization=0.001,\n            dense_layer_l2_regularization=0.0,\n            topology=\"feedforward\",\n            num_outputs=1,\n            init=\"glorot_uniform\"):\n        super(Class1NeuralNetworkModel, self).__init__()\n\n        self.peptide_encoding_shape = peptide_encoding_shape\n        self.has_allele = allele_representations is not None\n        self.peptide_allele_merge_method = peptide_allele_merge_method\n        self.peptide_allele_merge_activation = peptide_allele_merge_activation\n        self.dropout_probability = dropout_probability\n        self.topology = topology\n        self.num_outputs = num_outputs\n        self.activation_name = activation\n        self.output_activation_name = output_activation\n\n        if locally_connected_layers is None:\n            locally_connected_layers = []\n        if peptide_dense_layer_sizes is None:\n            peptide_dense_layer_sizes = []\n        if allele_dense_layer_sizes is None:\n            allele_dense_layer_sizes = []\n        if layer_sizes is None:\n            layer_sizes = [32]\n\n        # Build locally connected layers\n        self.lc_layers = nn.ModuleList()\n        input_length = peptide_encoding_shape[0]\n        in_channels = peptide_encoding_shape[1]\n\n        for i, lc_params in enumerate(locally_connected_layers):\n            filters = lc_params.get('filters', 8)\n            kernel_size = lc_params.get('kernel_size', 3)\n            lc_activation = lc_params.get('activation', 'tanh')\n\n            lc_layer = LocallyConnected1D(\n    
            in_channels=in_channels,\n                out_channels=filters,\n                input_length=input_length,\n                kernel_size=kernel_size,\n                activation=lc_activation\n            )\n            self.lc_layers.append(lc_layer)\n            in_channels = filters\n            input_length = lc_layer.output_length\n\n        # Flattened size after locally connected layers\n        self.flatten_size = input_length * in_channels\n\n        # Peptide dense layers\n        self.peptide_dense_layers = nn.ModuleList()\n        peptide_layer_input = self.flatten_size\n        for i, size in enumerate(peptide_dense_layer_sizes):\n            layer = nn.Linear(peptide_layer_input, size)\n            self.peptide_dense_layers.append(layer)\n            peptide_layer_input = size\n\n        # Batch normalization after peptide processing (early)\n        self.batch_norm_early = None\n        if batch_normalization:\n            self.batch_norm_early = nn.BatchNorm1d(\n                peptide_layer_input,\n                eps=KERAS_BATCH_NORM_EPSILON,\n                momentum=KERAS_BATCH_NORM_MOMENTUM,\n            )\n\n        # Allele embedding and processing\n        self.allele_embedding = None\n        self.allele_dense_layers = nn.ModuleList()\n        allele_output_size = 0\n\n        if self.has_allele:\n            num_alleles = allele_representations.shape[0]\n            embedding_dim = numpy.prod(allele_representations.shape[1:])\n\n            self.allele_embedding = nn.Embedding(\n                num_embeddings=num_alleles,\n                embedding_dim=embedding_dim\n            )\n            # Set embedding weights and freeze\n            self.allele_embedding.weight.data = torch.from_numpy(\n                allele_representations.reshape(num_alleles, -1).astype(numpy.float32)\n            )\n            self.allele_embedding.weight.requires_grad = False\n\n            allele_layer_input = embedding_dim\n            for i, size in enumerate(allele_dense_layer_sizes):\n                layer = nn.Linear(allele_layer_input, size)\n                self.allele_dense_layers.append(layer)\n                allele_layer_input = size\n            allele_output_size = allele_layer_input\n\n        # Compute merged size\n        if self.has_allele:\n            if peptide_allele_merge_method == \"concatenate\":\n                merged_size = peptide_layer_input + allele_output_size\n            elif peptide_allele_merge_method == \"multiply\":\n                # Both must have the same size for multiply\n                merged_size = peptide_layer_input\n            else:\n                raise ValueError(f\"Unknown merge method: {peptide_allele_merge_method}\")\n        else:\n            merged_size = peptide_layer_input\n\n        # Merge activation\n        self.merge_activation = get_activation(peptide_allele_merge_activation)\n\n        # Main dense layers\n        self.dense_layers = nn.ModuleList()\n        self.batch_norms = nn.ModuleList()\n        self.dropouts = nn.ModuleList()\n\n        # For DenseNet topology, track input sizes for skip connections\n        self.merged_size = merged_size\n        current_size = merged_size\n        prev_sizes = []  # Track previous layer output sizes for skip connections\n\n        for i, size in enumerate(layer_sizes):\n            # For DenseNet topology (with-skip-connections):\n            # - Layer 0: input = merged_size\n            # - Layer 1: input = merged_size + layer_sizes[0] (skip from input)\n        
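    #   (e.g. with hypothetical merged_size=96 and layer_sizes[0]=64,\n            #    the layer 1 input size is 96 + 64 = 160)\n        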
    # - Layer 2+: input = layer_sizes[i-2] + layer_sizes[i-1] (skip from 2 layers back)\n            if topology == \"with-skip-connections\" and i > 0:\n                if i == 1:\n                    # Skip from original merged input\n                    current_size = merged_size + prev_sizes[-1]\n                else:\n                    # Skip from 2 layers back\n                    current_size = prev_sizes[-2] + prev_sizes[-1]\n\n            layer = nn.Linear(current_size, size)\n            self.dense_layers.append(layer)\n\n            if batch_normalization:\n                self.batch_norms.append(nn.BatchNorm1d(\n                    size,\n                    eps=KERAS_BATCH_NORM_EPSILON,\n                    momentum=KERAS_BATCH_NORM_MOMENTUM,\n                ))\n            else:\n                self.batch_norms.append(None)\n\n            if dropout_probability > 0:\n                # Dropout probability in MHCflurry hyperparameters is keep-probability.\n                drop_prob = max(0.0, 1.0 - dropout_probability)\n                if drop_prob > 0:\n                    self.dropouts.append(nn.Dropout(p=drop_prob))\n                else:\n                    self.dropouts.append(None)\n            else:\n                self.dropouts.append(None)\n\n            prev_sizes.append(size)\n            current_size = size\n\n        # Note: For DenseNet topology, output layer receives only the last hidden layer output\n        # (skip connections are only between hidden layers, not to the output layer)\n\n        # Output layer\n        self.output_layer = nn.Linear(current_size, num_outputs)\n\n        # Activation functions\n        self.activation = get_activation(activation)\n        self.output_activation = get_activation(output_activation)\n\n        # Initialize weights\n        self._initialize_weights(init)\n\n    def _initialize_weights(self, init):\n        \"\"\"Initialize layer weights.\"\"\"\n        for module in self.modules():\n            if isinstance(module, nn.Linear):\n                if init == \"glorot_uniform\":\n                    nn.init.xavier_uniform_(module.weight)\n                elif init == \"glorot_normal\":\n                    nn.init.xavier_normal_(module.weight)\n                elif init == \"he_uniform\":\n                    nn.init.kaiming_uniform_(module.weight)\n                elif init == \"he_normal\":\n                    nn.init.kaiming_normal_(module.weight)\n                if module.bias is not None:\n                    nn.init.zeros_(module.bias)\n\n    def forward(self, inputs):\n        \"\"\"\n        Forward pass.\n\n        Parameters\n        ----------\n        inputs : dict\n            Dictionary with 'peptide' and optionally 'allele' keys\n\n        Returns\n        -------\n        torch.Tensor\n            Predictions of shape (batch, num_outputs)\n        \"\"\"\n        peptide = inputs['peptide']\n\n        # Locally connected layers\n        x = peptide\n        for lc_layer in self.lc_layers:\n            x = lc_layer(x)\n\n        # Flatten\n        x = x.reshape(x.size(0), -1)\n\n        # Peptide dense layers\n        for layer in self.peptide_dense_layers:\n            x = layer(x)\n            if self.activation is not None:\n                x = self.activation(x)\n\n        # Early batch normalization\n        if self.batch_norm_early is not None:\n            x = self.batch_norm_early(x)\n\n        # Allele processing and merge\n        if self.has_allele and 'allele' in inputs:\n            
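# Illustrative input sketch (shapes are assumptions, not checked here):\n            #   inputs['peptide']: float tensor, (batch, *peptide_encoding_shape)\n            #   inputs['allele']: integer tensor, (batch,) or (batch, 1) of allele indices\n            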
allele_idx = inputs['allele'].long()\n            # Handle case where input might be (batch,) or (batch, 1)\n            if allele_idx.dim() > 1:\n                allele_idx = allele_idx.squeeze(-1)\n            allele_embed = self.allele_embedding(allele_idx)\n\n            # Allele dense layers\n            for layer in self.allele_dense_layers:\n                allele_embed = layer(allele_embed)\n                if self.activation is not None:\n                    allele_embed = self.activation(allele_embed)\n\n            # Flatten allele embedding\n            allele_embed = allele_embed.reshape(allele_embed.size(0), -1)\n\n            # Merge\n            if self.peptide_allele_merge_method == \"concatenate\":\n                x = torch.cat([x, allele_embed], dim=-1)\n            elif self.peptide_allele_merge_method == \"multiply\":\n                x = x * allele_embed\n\n            # Merge activation\n            if self.merge_activation is not None:\n                x = self.merge_activation(x)\n\n        # Main dense layers (with optional skip connections for DenseNet topology)\n        prev_outputs = []  # Track outputs for skip connections\n        merged_input = x  # Save for DenseNet skip connections\n\n        for i, layer in enumerate(self.dense_layers):\n            # For DenseNet topology, concatenate skip connections\n            if self.topology == \"with-skip-connections\" and i > 0:\n                if i == 1:\n                    # Skip from original merged input\n                    x = torch.cat([merged_input, prev_outputs[-1]], dim=-1)\n                else:\n                    # Skip from 2 layers back\n                    x = torch.cat([prev_outputs[-2], prev_outputs[-1]], dim=-1)\n\n            x = layer(x)\n            if self.activation is not None:\n                x = self.activation(x)\n            if self.batch_norms[i] is not None:\n                x = self.batch_norms[i](x)\n            if self.dropouts[i] is not None:\n                x = self.dropouts[i](x)\n\n            prev_outputs.append(x)\n\n        # Note: For DenseNet topology, output layer receives only the last hidden layer output\n        # (skip connections are only between hidden layers, not to the output layer)\n\n        # Output\n        output = self.output_layer(x)\n        if self.output_activation is not None:\n            output = self.output_activation(output)\n\n        return output\n\n    def get_weights_list(self):\n        \"\"\"\n        Get weights as a list of numpy arrays (for compatibility with NPZ format).\n\n        Returns\n        -------\n        list of numpy.ndarray\n        \"\"\"\n        weights = []\n        for name, param in self.named_parameters():\n            weights.append(param.detach().cpu().numpy())\n        # Also include buffers (running mean/var for batch norm)\n        for name, buffer in self.named_buffers():\n            weights.append(buffer.detach().cpu().numpy())\n        return weights\n\n    def set_weights_list(self, weights, auto_convert_keras=True):\n        \"\"\"\n        Set weights from a list of numpy arrays.\n\n        Supports automatic detection and conversion of Keras-format weights\n        to PyTorch format for backward compatibility with pre-trained models.\n\n        Parameters\n        ----------\n        weights : list of numpy.ndarray\n        auto_convert_keras : bool\n            If True, automatically detect and convert Keras-format weights\n        \"\"\"\n        if auto_convert_keras and getattr(self, 
\"_keras_config\", None):\n            keras_layers = self._keras_config.get(\"config\", {}).get(\"layers\", [])\n            idx = 0\n\n            def assign_dense(layer, w, b):\n                w = w.astype(numpy.float32)\n                b = b.astype(numpy.float32)\n                if w.shape == layer.weight.shape[::-1] or (\n                    w.shape == layer.weight.shape and w.shape[0] == w.shape[1]\n                ):\n                    w = w.T\n                if w.shape != layer.weight.shape:\n                    raise ValueError(\n                        f\"Weight shape mismatch for {layer}: got {w.shape}, \"\n                        f\"expected {layer.weight.shape}\"\n                    )\n                if b.shape != layer.bias.shape:\n                    raise ValueError(\n                        f\"Bias shape mismatch for {layer}: got {b.shape}, \"\n                        f\"expected {layer.bias.shape}\"\n                    )\n                layer.weight.data = torch.from_numpy(w).to(\n                    device=layer.weight.device,\n                    dtype=layer.weight.dtype,\n                )\n                layer.bias.data = torch.from_numpy(b).to(\n                    device=layer.bias.device,\n                    dtype=layer.bias.dtype,\n                )\n\n            def assign_locally_connected(layer, w, b):\n                w = w.astype(numpy.float32)\n                b = b.astype(numpy.float32)\n                if len(w.shape) == 5 and w.shape[1] == 1:\n                    out_len, _, k, in_ch, out_ch = w.shape\n                    w = w.squeeze(1)\n                    w = w.reshape(out_len, k * in_ch, out_ch)\n                    w = w.transpose(0, 2, 1)\n                elif len(w.shape) == 3 and w.shape[0] == layer.output_length:\n                    # Keras (out_len, k*in_ch, out_ch) -> PyTorch (out_len, out_ch, in_ch*k)\n                    if w.shape[1] == layer.weight.shape[2] and w.shape[2] == layer.weight.shape[1]:\n                        w = w.transpose(0, 2, 1)\n                    else:\n                        kernel_size = layer.kernel_size\n                        out_len = w.shape[0]\n                        k_times_in_ch = w.shape[1]\n                        out_ch = w.shape[2]\n                        in_ch = k_times_in_ch // kernel_size\n                        w = w.reshape(out_len, kernel_size, in_ch, out_ch)\n                        w = w.transpose(0, 2, 1, 3)\n                        w = w.reshape(out_len, in_ch * kernel_size, out_ch)\n                        w = w.transpose(0, 2, 1)\n                if w.shape != layer.weight.shape:\n                    raise ValueError(\n                        f\"Weight shape mismatch for {layer}: got {w.shape}, \"\n                        f\"expected {layer.weight.shape}\"\n                    )\n                if b.shape != layer.bias.shape:\n                    raise ValueError(\n                        f\"Bias shape mismatch for {layer}: got {b.shape}, \"\n                        f\"expected {layer.bias.shape}\"\n                    )\n                layer.weight.data = torch.from_numpy(w).to(\n                    device=layer.weight.device,\n                    dtype=layer.weight.dtype,\n                )\n                layer.bias.data = torch.from_numpy(b).to(\n                    device=layer.bias.device,\n                    dtype=layer.bias.dtype,\n                )\n\n            def assign_batch_norm(layer, gamma, beta, mean, var):\n                layer.weight.data = 
torch.from_numpy(\n                    gamma.astype(numpy.float32)\n                ).to(device=layer.weight.device, dtype=layer.weight.dtype)\n                layer.bias.data = torch.from_numpy(\n                    beta.astype(numpy.float32)\n                ).to(device=layer.bias.device, dtype=layer.bias.dtype)\n                layer.running_mean.data = torch.from_numpy(\n                    mean.astype(numpy.float32)\n                ).to(\n                    device=layer.running_mean.device,\n                    dtype=layer.running_mean.dtype,\n                )\n                layer.running_var.data = torch.from_numpy(\n                    var.astype(numpy.float32)\n                ).to(\n                    device=layer.running_var.device,\n                    dtype=layer.running_var.dtype,\n                )\n\n            skip_keras_embedding = False\n            keras_metadata = getattr(self, \"_keras_metadata\", None)\n            if keras_metadata and keras_metadata.get(\"skip_embedding_weights\", False):\n                skip_keras_embedding = True\n\n            for layer in keras_layers:\n                layer_class = layer.get(\"class_name\", \"\")\n                layer_name = layer.get(\"config\", {}).get(\"name\", \"\")\n\n                if layer_class == \"Dense\":\n                    w = weights[idx]\n                    b = weights[idx + 1]\n                    idx += 2\n                    if layer_name == \"output\":\n                        assign_dense(self.output_layer, w, b)\n                    elif layer_name.startswith(\"dense_\"):\n                        dense_idx = int(layer_name.split(\"_\")[1])\n                        assign_dense(self.dense_layers[dense_idx], w, b)\n                    elif layer_name.startswith(\"peptide_dense_\"):\n                        dense_idx = int(layer_name.split(\"_\")[2])\n                        assign_dense(self.peptide_dense_layers[dense_idx], w, b)\n                    elif layer_name.startswith(\"allele_dense_\"):\n                        dense_idx = int(layer_name.split(\"_\")[2])\n                        assign_dense(self.allele_dense_layers[dense_idx], w, b)\n                elif layer_class == \"LocallyConnected1D\":\n                    w = weights[idx]\n                    b = weights[idx + 1]\n                    idx += 2\n                    lc_idx = int(layer_name.split(\"_\")[1])\n                    assign_locally_connected(self.lc_layers[lc_idx], w, b)\n                elif layer_class == \"Embedding\":\n                    w = weights[idx]\n                    idx += 1\n                    if skip_keras_embedding:\n                        continue\n                    if self.allele_embedding is None:\n                        continue\n                    if w.shape == self.allele_embedding.weight.shape:\n                        target = self.allele_embedding.weight\n                        self.allele_embedding.weight.data = torch.from_numpy(\n                            w.astype(numpy.float32)\n                        ).to(device=target.device, dtype=target.dtype)\n                elif layer_class == \"BatchNormalization\":\n                    gamma = weights[idx]\n                    beta = weights[idx + 1]\n                    mean = weights[idx + 2]\n                    var = weights[idx + 3]\n                    idx += 4\n                    if layer_name == \"batch_norm_early\":\n                        if self.batch_norm_early is not None:\n                            
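# Keras BatchNormalization weights arrive as [gamma, beta, moving_mean, moving_variance].\n                            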
assign_batch_norm(self.batch_norm_early, gamma, beta, mean, var)\n                    elif layer_name.startswith(\"batch_norm_\"):\n                        bn_idx = int(layer_name.split(\"_\")[2])\n                        if self.batch_norms[bn_idx] is not None:\n                            assign_batch_norm(self.batch_norms[bn_idx], gamma, beta, mean, var)\n                else:\n                    continue\n\n            return\n        idx = 0\n\n        # Check for keras metadata to know if we need to skip embedding weights\n        keras_metadata = getattr(self, '_keras_metadata', None)\n        skip_keras_embedding = False\n        if keras_metadata and keras_metadata.get('skip_embedding_weights', False):\n            skip_keras_embedding = True\n\n        named_modules = dict(self.named_modules()) if auto_convert_keras else {}\n\n        for name, param in self.named_parameters():\n            # Skip allele_embedding when loading Keras weights with placeholder\n            if skip_keras_embedding and 'allele_embedding' in name:\n                # Also skip the corresponding placeholder weight in the weights list\n                # Placeholder embeddings have shape (0, embed_dim)\n                while idx < len(weights) and len(weights[idx].shape) == 2 and weights[idx].shape[0] == 0:\n                    idx += 1\n                continue\n            w = weights[idx].astype(numpy.float32)\n            extra_keras_skip = 0\n            module = None\n            if auto_convert_keras and \".\" in name:\n                module_name = name.rsplit(\".\", 1)[0]\n                module = named_modules.get(module_name)\n\n            # Skip allele_embedding if shapes don't match (pan-allele models)\n            # The embedding will be set by set_allele_representations later\n            if 'allele_embedding' in name and w.shape != param.shape:\n                # Advance index past this weight\n                idx += 1\n                continue\n\n            # Auto-detect and convert Keras weights\n            if auto_convert_keras:\n                # Dense/Linear layer: Keras (in, out) -> PyTorch (out, in)\n                # Note: Must transpose even when shapes match (square matrices)\n                # Check for weight (not bias) by looking at param name\n                is_linear_weight = ('weight' in name and\n                                    'embedding' not in name and\n                                    len(w.shape) == 2)\n                if is_linear_weight and (w.shape == param.shape[::-1] or\n                        (w.shape == param.shape and w.shape[0] == w.shape[1])):\n                    w = w.T\n                # LocallyConnected1D weight: Keras (out_len, 1, k, in_ch, out_ch)\n                # -> PyTorch (out_len, out_ch, in_ch * k)\n                elif len(w.shape) == 5 and w.shape[1] == 1:\n                    out_len, _, k, in_ch, out_ch = w.shape\n                    w = w.squeeze(1)  # (out_len, k, in_ch, out_ch)\n                    w = w.reshape(out_len, k * in_ch, out_ch)\n                    w = w.transpose(0, 2, 1)  # (out_len, out_ch, k * in_ch)\n                # LocallyConnected1D weight (3D): Keras (out_len, k*in_ch, out_ch)\n                # -> PyTorch (out_len, out_ch, in_ch*k)\n                # Note: Keras stores kernel_positions as outer loop, channels as inner\n                # PyTorch unfold produces channels as outer loop, kernel_positions as inner\n                elif len(w.shape) == 3 and w.shape[0] == param.shape[0] and \\\n                
        w.shape[1] == param.shape[2] and w.shape[2] == param.shape[1]:\n                    # LocallyConnected1D weight (3D): Keras (out_len, k*in_ch, out_ch)\n                    # -> PyTorch (out_len, out_ch, k*in_ch)\n                    w = w.transpose(0, 2, 1)\n                # LocallyConnected1D bias: Keras (out_len * out_ch,) -> PyTorch (out_len, out_ch)\n                elif len(w.shape) == 1 and len(param.shape) == 2 and \\\n                        w.shape[0] == param.shape[0] * param.shape[1]:\n                    w = w.reshape(param.shape)\n                # BatchNorm: Keras provides gamma, beta, moving_mean, moving_var.\n                # PyTorch exposes gamma/beta as params and moving stats as buffers.\n                if module is not None and isinstance(module, torch.nn.BatchNorm1d):\n                    if name.endswith(\"bias\") and idx + 2 < len(weights):\n                        running_mean = weights[idx + 1].astype(numpy.float32)\n                        running_var = weights[idx + 2].astype(numpy.float32)\n                        if module.running_mean.shape == running_mean.shape:\n                            module.running_mean.data = torch.from_numpy(\n                                running_mean\n                            ).to(\n                                device=module.running_mean.device,\n                                dtype=module.running_mean.dtype,\n                            )\n                        if module.running_var.shape == running_var.shape:\n                            module.running_var.data = torch.from_numpy(\n                                running_var\n                            ).to(\n                                device=module.running_var.device,\n                                dtype=module.running_var.dtype,\n                            )\n                        extra_keras_skip = 2\n\n            if w.shape != param.shape:\n                raise ValueError(\n                    f\"Weight shape mismatch for {name}: \"\n                    f\"got {weights[idx].shape}, expected {param.shape}\"\n                )\n\n            param.data = torch.from_numpy(w).to(\n                device=param.device,\n                dtype=param.dtype,\n            )\n            idx += 1 + extra_keras_skip\n        if not auto_convert_keras:\n            named_modules_dict = dict(self.named_modules())\n            for name, buffer in self.named_buffers():\n                tensor = torch.from_numpy(weights[idx]).to(\n                    device=buffer.device,\n                    dtype=buffer.dtype,\n                )\n                # Navigate to the correct submodule for nested buffers\n                if \".\" in name:\n                    module_path, buffer_name = name.rsplit(\".\", 1)\n                    named_modules_dict[module_path]._buffers[buffer_name] = tensor\n                else:\n                    self._buffers[name] = tensor\n                idx += 1\n\n    def to_json(self):\n        \"\"\"\n        Serialize model configuration to JSON string.\n\n        Returns\n        -------\n        str\n            JSON representation of model configuration\n        \"\"\"\n        import json\n\n        # Extract layer configurations\n        lc_layers_config = []\n        for lc_layer in self.lc_layers:\n            lc_layers_config.append({\n                'in_channels': lc_layer.in_channels,\n                'out_channels': lc_layer.out_channels,\n                'kernel_size': lc_layer.kernel_size,\n                
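# output_length is typically input_length - kernel_size + 1 (assuming stride 1, no padding).\n                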
'input_length': lc_layer.input_length,\n                'output_length': lc_layer.output_length,\n                'activation': lc_layer.activation_name,\n            })\n\n        peptide_dense_sizes = [\n            layer.out_features for layer in self.peptide_dense_layers\n        ]\n        allele_dense_sizes = [\n            layer.out_features for layer in self.allele_dense_layers\n        ]\n        layer_sizes = [\n            layer.out_features for layer in self.dense_layers\n        ]\n\n        config = {\n            'class': 'Class1NeuralNetworkModel',\n            'peptide_encoding_shape': list(self.peptide_encoding_shape),\n            'has_allele': self.has_allele,\n            'peptide_allele_merge_method': self.peptide_allele_merge_method,\n            'peptide_allele_merge_activation': self.peptide_allele_merge_activation,\n            'dropout_probability': self.dropout_probability,\n            'topology': self.topology,\n            'num_outputs': self.num_outputs,\n            'activation': self.activation_name,\n            'output_activation': self.output_activation_name,\n            'locally_connected_layers': lc_layers_config,\n            'peptide_dense_layer_sizes': peptide_dense_sizes,\n            'allele_dense_layer_sizes': allele_dense_sizes,\n            'layer_sizes': layer_sizes,\n            'batch_normalization': self.batch_norm_early is not None,\n        }\n\n        return json.dumps(config, sort_keys=True)\n\n\nclass Class1NeuralNetwork(object):\n    \"\"\"\n    Low level class I predictor consisting of a single neural network.\n\n    Both single allele and pan-allele prediction are supported.\n\n    Users will generally use Class1AffinityPredictor, which gives a higher-level\n    interface and supports ensembles.\n    \"\"\"\n\n    network_hyperparameter_defaults = HyperparameterDefaults(\n        allele_amino_acid_encoding=\"BLOSUM62\",\n        allele_dense_layer_sizes=[],\n        peptide_encoding={\n            \"vector_encoding_name\": \"BLOSUM62\",\n            \"alignment_method\": \"pad_middle\",\n            \"left_edge\": 4,\n            \"right_edge\": 4,\n            \"max_length\": 15,\n        },\n        peptide_dense_layer_sizes=[],\n        peptide_allele_merge_method=\"multiply\",\n        peptide_allele_merge_activation=\"\",\n        layer_sizes=[32],\n        dense_layer_l1_regularization=0.001,\n        dense_layer_l2_regularization=0.0,\n        activation=\"tanh\",\n        init=\"glorot_uniform\",\n        output_activation=\"sigmoid\",\n        dropout_probability=0.0,\n        batch_normalization=False,\n        locally_connected_layers=[\n            {\"filters\": 8, \"activation\": \"tanh\", \"kernel_size\": 3}\n        ],\n        topology=\"feedforward\",\n        num_outputs=1,\n    )\n    \"\"\"\n    Hyperparameters (and their default values) that affect the neural network\n    architecture.\n    \"\"\"\n\n    compile_hyperparameter_defaults = HyperparameterDefaults(\n        loss=\"custom:mse_with_inequalities\",\n        optimizer=\"rmsprop\",\n        learning_rate=None,\n    )\n    \"\"\"\n    Loss and optimizer hyperparameters.\n    \"\"\"\n\n    fit_hyperparameter_defaults = HyperparameterDefaults(\n        max_epochs=500,\n        validation_split=0.1,\n        early_stopping=True,\n        minibatch_size=128,\n        data_dependent_initialization_method=None,\n        random_negative_affinity_min=20000.0,\n        random_negative_affinity_max=50000.0,\n        random_negative_output_indices=None,\n    
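    # Note: \"lsuv\" is currently the only supported\n        # data_dependent_initialization_method value.\n    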
).extend(RandomNegativePeptides.hyperparameter_defaults)\n    \"\"\"\n    Hyperparameters for neural network training.\n    \"\"\"\n\n    early_stopping_hyperparameter_defaults = HyperparameterDefaults(\n        patience=20,\n        min_delta=0.0,\n    )\n    \"\"\"\n    Hyperparameters for early stopping.\n    \"\"\"\n\n    miscelaneous_hyperparameter_defaults = HyperparameterDefaults(\n        train_data={},\n    )\n    \"\"\"\n    Miscellaneous hyperparameters. These parameters are not used by this class\n    but may be interpreted by other code.\n    \"\"\"\n\n    hyperparameter_defaults = (\n        network_hyperparameter_defaults.extend(compile_hyperparameter_defaults)\n        .extend(fit_hyperparameter_defaults)\n        .extend(early_stopping_hyperparameter_defaults)\n        .extend(miscelaneous_hyperparameter_defaults)\n    )\n    \"\"\"\n    Combined set of all supported hyperparameters and their default values.\n    \"\"\"\n\n    # Hyperparameter renames.\n    hyperparameter_renames = {\n        \"use_embedding\": None,\n        \"pseudosequence_use_embedding\": None,\n        \"monitor\": None,\n        \"min_delta\": None,\n        \"verbose\": None,\n        \"mode\": None,\n        \"take_best_epoch\": None,\n        \"kmer_size\": None,\n        \"peptide_amino_acid_encoding\": None,\n        \"embedding_input_dim\": None,\n        \"embedding_output_dim\": None,\n        \"embedding_init_method\": None,\n        \"left_edge\": None,\n        \"right_edge\": None,\n    }\n\n    @classmethod\n    def apply_hyperparameter_renames(cls, hyperparameters):\n        \"\"\"\n        Handle hyperparameter renames.\n\n        Parameters\n        ----------\n        hyperparameters : dict\n\n        Returns\n        -------\n        dict : updated hyperparameters\n\n        \"\"\"\n        for from_name, to_name in cls.hyperparameter_renames.items():\n            if from_name in hyperparameters:\n                value = hyperparameters.pop(from_name)\n                if to_name:\n                    hyperparameters[to_name] = value\n        return hyperparameters\n\n    def __init__(self, **hyperparameters):\n        self.hyperparameters = self.hyperparameter_defaults.with_defaults(\n            self.apply_hyperparameter_renames(hyperparameters)\n        )\n\n        self._network = None\n        self.network_json = None\n        self.network_weights = None\n        self.network_weights_loader = None\n\n        self.fit_info = []\n        self.prediction_cache = weakref.WeakKeyDictionary()\n\n    MODELS_CACHE = {}\n    \"\"\"\n    Process-wide model cache, a map from architecture JSON string to\n    (PyTorch model, existing network weights)\n    \"\"\"\n\n    @classmethod\n    def clear_model_cache(klass):\n        \"\"\"\n        Clear the model cache.\n        \"\"\"\n        klass.MODELS_CACHE.clear()\n\n    @classmethod\n    def borrow_cached_network(klass, network_json, network_weights):\n        \"\"\"\n        Return a PyTorch model with the specified architecture and weights.\n        As an optimization, when possible this will reuse architectures from a\n        process-wide cache.\n\n        Parameters\n        ----------\n        network_json : string of JSON\n        network_weights : list of numpy.array\n\n        Returns\n        -------\n        Class1NeuralNetworkModel\n        \"\"\"\n        assert network_weights is not None\n        key = klass.model_cache_key(network_json)\n        config = json.loads(network_json)\n        # Detect if weights are from Keras 
or PyTorch format\n        # Keras JSON has 'class_name': 'Model' or 'Functional'; PyTorch has 'hyperparameters'\n        is_keras_format = config.get('class_name') in ('Model', 'Functional')\n\n        if key not in klass.MODELS_CACHE:\n            # Cache miss - create new model\n            network = klass._create_model_from_config(config)\n            existing_weights = None\n        else:\n            # Cache hit\n            (network, existing_weights) = klass.MODELS_CACHE[key]\n\n        if existing_weights is not network_weights:\n            network.set_weights_list(network_weights, auto_convert_keras=is_keras_format)\n            klass.MODELS_CACHE[key] = (network, network_weights)\n\n        return network\n\n    @classmethod\n    def _parse_keras_json_config(cls, config):\n        \"\"\"\n        Parse a legacy Keras model JSON config to extract hyperparameters.\n\n        Parameters\n        ----------\n        config : dict\n            Keras model JSON config with 'class_name', 'config', etc.\n\n        Returns\n        -------\n        tuple of (dict, dict)\n            First dict: Hyperparameters dict compatible with Class1NeuralNetwork\n            Second dict: Metadata about Keras model structure (e.g., embedding info)\n        \"\"\"\n        layers = config.get('config', {}).get('layers', [])\n\n        hyperparameters = {\n            'locally_connected_layers': [],\n            'layer_sizes': [],\n            'activation': 'tanh',\n            'output_activation': 'sigmoid',\n            'dropout_probability': 0.0,\n            'batch_normalization': False,\n            'dense_layer_l1_regularization': 0.001,\n            'dense_layer_l2_regularization': 0.0,\n            'peptide_allele_merge_method': 'multiply',  # Default\n        }\n\n        # Metadata about Keras structure\n        keras_metadata = {\n            'has_embedding': False,\n            'embedding_input_dim': 0,\n            'embedding_output_dim': 0,\n            'skip_embedding_weights': False,\n        }\n\n        dense_layers = []\n        peptide_dense_sizes = []\n        allele_dense_sizes = []\n        concatenate_count = 0\n        for layer in layers:\n            layer_class = layer.get('class_name', '')\n            layer_config = layer.get('config', {})\n\n            if layer_class == 'LocallyConnected1D':\n                lc_config = {\n                    'filters': layer_config.get('filters', 8),\n                    'kernel_size': layer_config.get('kernel_size', [3])[0] if isinstance(\n                        layer_config.get('kernel_size', [3]), list\n                    ) else layer_config.get('kernel_size', 3),\n                    'activation': layer_config.get('activation', 'tanh'),\n                }\n                hyperparameters['locally_connected_layers'].append(lc_config)\n                hyperparameters['activation'] = lc_config['activation']\n\n            elif layer_class == 'Dense':\n                units = layer_config.get('units', 32)\n                activation = layer_config.get('activation', 'tanh')\n                layer_name = layer_config.get('name', '')\n                if layer_name.startswith('peptide_dense_'):\n                    peptide_dense_sizes.append(units)\n                elif layer_name.startswith('allele_dense_'):\n                    allele_dense_sizes.append(units)\n                else:\n                    dense_layers.append({'units': units, 'activation': activation})\n\n                # Extract regularization from first dense layer\n   
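             # For example, a serialized Keras regularizer typically looks like:\n                #   {'class_name': 'L1L2', 'config': {'l1': 0.001, 'l2': 0.0}}\n   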
             kernel_reg = layer_config.get('kernel_regularizer')\n                if kernel_reg and isinstance(kernel_reg, dict):\n                    reg_config = kernel_reg.get('config', {})\n                    if 'l1' in reg_config:\n                        hyperparameters['dense_layer_l1_regularization'] = reg_config['l1']\n                    if 'l2' in reg_config:\n                        hyperparameters['dense_layer_l2_regularization'] = reg_config['l2']\n\n            elif layer_class == 'Dropout':\n                rate = layer_config.get('rate', 0.0)\n                hyperparameters['dropout_probability'] = 1.0 - rate\n\n            elif layer_class == 'BatchNormalization':\n                hyperparameters['batch_normalization'] = True\n\n            elif layer_class == 'Embedding':\n                keras_metadata['has_embedding'] = True\n                keras_metadata['embedding_input_dim'] = layer_config.get('input_dim', 0)\n                keras_metadata['embedding_output_dim'] = layer_config.get('output_dim', 0)\n                # If input_dim is 0, it's a placeholder and weights should be skipped\n                if layer_config.get('input_dim', 0) == 0:\n                    keras_metadata['skip_embedding_weights'] = True\n\n            elif layer_class == 'Concatenate':\n                concatenate_count += 1\n                # Only set merge_method to concatenate if there's just one Concatenate\n                # and it's likely for peptide-allele merging (not DenseNet skip connections)\n                if concatenate_count == 1:\n                    hyperparameters['peptide_allele_merge_method'] = 'concatenate'\n\n            elif layer_class == 'Multiply':\n                hyperparameters['peptide_allele_merge_method'] = 'multiply'\n\n        # Multiple Concatenate layers indicate DenseNet topology with skip connections\n        # Note: The first Concatenate is typically for peptide-allele merging,\n        # subsequent ones are for DenseNet skip connections\n        if concatenate_count > 1:\n            hyperparameters['topology'] = 'with-skip-connections'\n            # Keep the merge method as detected (concatenate from first Concatenate layer)\n\n        # The last Dense layer is the output layer\n        if dense_layers:\n            hyperparameters['output_activation'] = dense_layers[-1]['activation']\n            hyperparameters['num_outputs'] = dense_layers[-1]['units']\n            # All other Dense layers contribute to layer_sizes\n            hyperparameters['layer_sizes'] = [d['units'] for d in dense_layers[:-1]]\n            if dense_layers[:-1]:\n                hyperparameters['activation'] = dense_layers[0]['activation']\n\n        if peptide_dense_sizes:\n            hyperparameters['peptide_dense_layer_sizes'] = peptide_dense_sizes\n        if allele_dense_sizes:\n            hyperparameters['allele_dense_layer_sizes'] = allele_dense_sizes\n\n        return hyperparameters, keras_metadata\n\n    @classmethod\n    def _create_model_from_config(cls, config, instance_hyperparameters=None):\n        \"\"\"Create a model from a configuration dictionary.\n\n        Parameters\n        ----------\n        config : dict\n            Configuration dictionary (either Keras JSON or hyperparameters dict)\n        instance_hyperparameters : dict, optional\n            Hyperparameters from the Class1NeuralNetwork instance.\n            These take precedence for things like peptide_encoding.\n        \"\"\"\n        keras_metadata = None\n\n        # Check if this is 
a merged network config\n        if config.get('merged_networks'):\n            return cls._create_merged_model_from_config(config, instance_hyperparameters)\n\n        # Check if this is a legacy Keras JSON config\n        if config.get('class_name') in ('Model', 'Functional'):\n            hyperparameters, keras_metadata = cls._parse_keras_json_config(config)\n        else:\n            # Extract hyperparameters from config (new format)\n            hyperparameters = config.get('hyperparameters', config)\n\n        # Merge with instance hyperparameters if provided\n        # Instance hyperparameters take precedence for things like peptide_encoding\n        if instance_hyperparameters:\n            # Copy to avoid modifying original\n            merged = dict(instance_hyperparameters)\n            # Update with parsed hyperparameters (architecture-specific settings)\n            for key in ['layer_sizes', 'locally_connected_layers', 'dropout_probability',\n                        'batch_normalization', 'activation', 'output_activation',\n                        'peptide_allele_merge_method']:\n                if key in hyperparameters:\n                    merged[key] = hyperparameters[key]\n            hyperparameters = merged\n\n        # Create a temporary instance to get encoding shape\n        temp = cls(**hyperparameters)\n        peptide_encoding_shape = temp.peptides_to_network_input([]).shape[1:]\n\n        # Get allele representations if present\n        allele_representations = config.get('allele_representations')\n        if allele_representations is not None:\n            allele_representations = numpy.array(allele_representations)\n\n        # For pan-allele Keras models with placeholder embedding (input_dim=0),\n        # create a placeholder allele representation to ensure correct architecture\n        if (allele_representations is None and keras_metadata is not None\n                and keras_metadata.get('has_embedding', False)\n                and keras_metadata.get('embedding_output_dim', 0) > 0):\n            # Create placeholder with 1 allele and correct embedding dim\n            # This will be replaced by set_allele_representations later\n            embedding_dim = keras_metadata['embedding_output_dim']\n            allele_representations = numpy.zeros((1, embedding_dim), dtype=numpy.float32)\n\n        # For PyTorch-format configs without allele_representations but with\n        # allele_amino_acid_encoding (pan-allele models), create placeholder\n        # Check has_allele flag to distinguish pan-allele from allele-specific models\n        has_allele = config.get('has_allele', True)  # Default True for backward compat\n        if (allele_representations is None and keras_metadata is None\n                and has_allele and hyperparameters.get('allele_amino_acid_encoding')):\n            # Compute embedding dimension from encoding\n            from .amino_acid import ENCODING_DATA_FRAMES\n            encoding_name = hyperparameters['allele_amino_acid_encoding']\n            encoding_df = ENCODING_DATA_FRAMES.get(encoding_name)\n            if encoding_df is not None:\n                # Standard allele pseudosequence length is 37 amino acids\n                allele_seq_length = 37\n                embedding_dim = allele_seq_length * len(encoding_df.columns)\n                allele_representations = numpy.zeros((1, embedding_dim), dtype=numpy.float32)\n\n        model = Class1NeuralNetworkModel(\n            peptide_encoding_shape=peptide_encoding_shape,\n          
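  # allele_representations may be None (allele-specific model) or a zero\n            # placeholder that set_allele_representations() replaces later.\n          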
  allele_representations=allele_representations,\n            locally_connected_layers=hyperparameters.get('locally_connected_layers', []),\n            peptide_dense_layer_sizes=hyperparameters.get('peptide_dense_layer_sizes', []),\n            allele_dense_layer_sizes=hyperparameters.get('allele_dense_layer_sizes', []),\n            layer_sizes=hyperparameters.get('layer_sizes', [32]),\n            peptide_allele_merge_method=hyperparameters.get('peptide_allele_merge_method', 'multiply'),\n            peptide_allele_merge_activation=hyperparameters.get('peptide_allele_merge_activation', ''),\n            activation=hyperparameters.get('activation', 'tanh'),\n            output_activation=hyperparameters.get('output_activation', 'sigmoid'),\n            dropout_probability=hyperparameters.get('dropout_probability', 0.0),\n            batch_normalization=hyperparameters.get('batch_normalization', False),\n            dense_layer_l1_regularization=hyperparameters.get('dense_layer_l1_regularization', 0.001),\n            dense_layer_l2_regularization=hyperparameters.get('dense_layer_l2_regularization', 0.0),\n            topology=hyperparameters.get('topology', 'feedforward'),\n            num_outputs=hyperparameters.get('num_outputs', 1),\n            init=hyperparameters.get('init', 'glorot_uniform'),\n        )\n\n        # Store keras metadata and config for weight loading\n        if keras_metadata is not None:\n            model._keras_metadata = keras_metadata\n            model._keras_config = config\n\n        return model\n\n    @classmethod\n    def _create_merged_model_from_config(cls, config, instance_hyperparameters=None):\n        \"\"\"Create a merged model from a configuration dictionary.\n\n        Parameters\n        ----------\n        config : dict\n            Configuration dictionary with 'merged_networks' key\n        instance_hyperparameters : dict, optional\n            Hyperparameters from the Class1NeuralNetwork instance.\n        \"\"\"\n        merged_configs = config['merged_networks']\n        merge_method = config.get('merge_method', 'average')\n\n        # Create a temporary instance to get encoding shape\n        base_hyperparameters = config.get('hyperparameters', {})\n        if instance_hyperparameters:\n            base_hyperparameters = dict(instance_hyperparameters)\n            base_hyperparameters.update(config.get('hyperparameters', {}))\n        temp = cls(**base_hyperparameters)\n        peptide_encoding_shape = temp.peptides_to_network_input([]).shape[1:]\n\n        # Create placeholder allele representations for pan-allele models\n        allele_representations = None\n        if base_hyperparameters.get('allele_amino_acid_encoding'):\n            from .amino_acid import ENCODING_DATA_FRAMES\n            encoding_name = base_hyperparameters['allele_amino_acid_encoding']\n            encoding_df = ENCODING_DATA_FRAMES.get(encoding_name)\n            if encoding_df is not None:\n                allele_seq_length = 37\n                embedding_dim = allele_seq_length * len(encoding_df.columns)\n                allele_representations = numpy.zeros((1, embedding_dim), dtype=numpy.float32)\n\n        # Create sub-networks\n        sub_networks = []\n        for sub_config in merged_configs:\n            model = Class1NeuralNetworkModel(\n                peptide_encoding_shape=peptide_encoding_shape,\n                allele_representations=allele_representations,\n                locally_connected_layers=sub_config.get('locally_connected_layers', 
[]),\n                peptide_dense_layer_sizes=sub_config.get('peptide_dense_layer_sizes', []),\n                allele_dense_layer_sizes=sub_config.get('allele_dense_layer_sizes', []),\n                layer_sizes=sub_config.get('layer_sizes', [32]),\n                peptide_allele_merge_method=sub_config.get('peptide_allele_merge_method', 'multiply'),\n                peptide_allele_merge_activation=sub_config.get('peptide_allele_merge_activation', ''),\n                activation=sub_config.get('activation', 'tanh'),\n                output_activation=sub_config.get('output_activation', 'sigmoid'),\n                dropout_probability=sub_config.get('dropout_probability', 0.0),\n                batch_normalization=sub_config.get('batch_normalization', False),\n                dense_layer_l1_regularization=base_hyperparameters.get('dense_layer_l1_regularization', 0.001),\n                dense_layer_l2_regularization=base_hyperparameters.get('dense_layer_l2_regularization', 0.0),\n                topology=sub_config.get('topology', 'feedforward'),\n                num_outputs=sub_config.get('num_outputs', 1),\n                init=base_hyperparameters.get('init', 'glorot_uniform'),\n            )\n            sub_networks.append(model)\n\n        return MergedClass1NeuralNetwork(sub_networks, merge_method=merge_method)\n\n    @staticmethod\n    def model_cache_key(network_json):\n        \"\"\"\n        Given a JSON description of a neural network, return a cache key.\n\n        Parameters\n        ----------\n        network_json : string\n\n        Returns\n        -------\n        string\n        \"\"\"\n        # Remove regularization settings as they don't affect predictions\n        def drop_properties(d):\n            if isinstance(d, dict):\n                d.pop('dense_layer_l1_regularization', None)\n                d.pop('dense_layer_l2_regularization', None)\n            return d\n\n        description = json.loads(network_json, object_hook=drop_properties)\n        return json.dumps(description)\n\n    @staticmethod\n    def keras_network_cache_key(network_json):\n        \"\"\"\n        Backward-compatible alias for ``model_cache_key``.\n        \"\"\"\n        return Class1NeuralNetwork.model_cache_key(network_json)\n\n    def network(self, borrow=False):\n        \"\"\"\n        Return the PyTorch model associated with this predictor.\n\n        Parameters\n        ----------\n        borrow : bool\n            Whether to return a cached model if possible\n\n        Returns\n        -------\n        Class1NeuralNetworkModel\n        \"\"\"\n        if self._network is None and self.network_json is not None:\n            self.load_weights()\n            if borrow:\n                return self.borrow_cached_network(\n                    self.network_json, self.network_weights\n                )\n            else:\n                config = json.loads(self.network_json)\n                # Detect if weights are from Keras or PyTorch format\n                # Keras JSON has 'class_name': 'Model' or 'Functional'; PyTorch has 'hyperparameters'\n                is_keras_format = config.get('class_name') in ('Model', 'Functional')\n                # Pass this instance's hyperparameters to preserve peptide_encoding etc.\n                self._network = self._create_model_from_config(\n                    config, instance_hyperparameters=self.hyperparameters)\n                if self.network_weights is not None:\n                    self._network.set_weights_list(\n                  
      self.network_weights,\n                        auto_convert_keras=is_keras_format\n                    )\n                self.network_json = None\n                self.network_weights = None\n        return self._network\n\n    def update_network_description(self):\n        \"\"\"\n        Update self.network_json and self.network_weights properties based on\n        this instance's neural network.\n        \"\"\"\n        if self._network is not None:\n            config = {\n                'hyperparameters': dict(self.hyperparameters),\n            }\n\n            # Check if this is a merged network\n            if isinstance(self._network, MergedClass1NeuralNetwork):\n                # Save sub-network configs for merged networks\n                sub_configs = []\n                for subnet in self._network.networks:\n                    sub_config = {}\n                    # Get the architecture info from the network itself\n                    sub_config['layer_sizes'] = [\n                        layer.out_features for layer in subnet.dense_layers\n                    ]\n                    sub_config['locally_connected_layers'] = [\n                        {'filters': layer.out_channels, 'kernel_size': layer.kernel_size}\n                        for layer in subnet.lc_layers\n                    ] if hasattr(subnet, 'lc_layers') else []\n                    sub_config['peptide_dense_layer_sizes'] = [\n                        layer.out_features for layer in subnet.peptide_dense_layers\n                    ] if hasattr(subnet, 'peptide_dense_layers') else []\n                    sub_config['allele_dense_layer_sizes'] = [\n                        layer.out_features for layer in subnet.allele_dense_layers\n                    ] if hasattr(subnet, 'allele_dense_layers') else []\n                    # MHCflurry hyperparameters use keep probability, not\n                    # PyTorch Dropout.p (drop probability).\n                    sub_config['dropout_probability'] = getattr(\n                        subnet,\n                        'dropout_probability',\n                        0.0,\n                    )\n                    sub_config['batch_normalization'] = (\n                        hasattr(subnet, 'batch_norms') and bool(subnet.batch_norms) and\n                        any(bn is not None for bn in subnet.batch_norms)\n                    )\n                    sub_config['activation'] = subnet.activation_name\n                    sub_config['output_activation'] = subnet.output_activation_name\n                    sub_config['peptide_allele_merge_method'] = subnet.peptide_allele_merge_method\n                    sub_config['peptide_allele_merge_activation'] = subnet.peptide_allele_merge_activation\n                    sub_config['topology'] = subnet.topology\n                    sub_config['num_outputs'] = subnet.output_layer.out_features\n                    sub_configs.append(sub_config)\n                config['merged_networks'] = sub_configs\n                config['merge_method'] = self._network.merge_method\n            else:\n                # Save whether the network has allele features\n                config['has_allele'] = getattr(self._network, 'has_allele', False)\n                # Save allele representations if present in the network\n                if hasattr(self._network, 'allele_embedding') and self._network.allele_embedding is not None:\n                    allele_embed = self._network.allele_embedding.weight.detach().cpu().numpy()\n                  
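  # tolist() keeps the saved config JSON-serializable.\n                  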
  config['allele_representations'] = allele_embed.tolist()\n\n            self.network_json = json.dumps(config)\n            self.network_weights = self._network.get_weights_list()\n\n    def get_config(self):\n        \"\"\"\n        serialize to a dict all attributes except model weights\n\n        Returns\n        -------\n        dict\n        \"\"\"\n        self.update_network_description()\n        result = dict(self.__dict__)\n        result[\"_network\"] = None\n        result[\"network_weights\"] = None\n        result[\"network_weights_loader\"] = None\n        result[\"prediction_cache\"] = None\n        return result\n\n    @classmethod\n    def from_config(cls, config, weights=None, weights_loader=None):\n        \"\"\"\n        deserialize from a dict returned by get_config().\n\n        Supports both:\n        - Native Class1NeuralNetwork configs with 'hyperparameters' key\n        - Legacy Keras model JSON configs with 'class_name', 'config', etc.\n\n        Parameters\n        ----------\n        config : dict\n        weights : list of array, optional\n            Network weights to restore\n        weights_loader : callable, optional\n            Function to call (no arguments) to load weights when needed\n\n        Returns\n        -------\n        Class1NeuralNetwork\n        \"\"\"\n        config = dict(config)\n\n        # Check if this is a legacy Keras JSON config\n        if config.get('class_name') in ('Model', 'Functional'):\n            hyperparameters, keras_metadata = cls._parse_keras_json_config(config)\n            instance = cls(**hyperparameters)\n            # Store metadata for weight loading\n            instance._keras_metadata = keras_metadata\n            # Store the original config as network_json for lazy network creation\n            instance.network_json = json.dumps(config)\n        else:\n            # Standard Class1NeuralNetwork config format\n            instance = cls(**config.pop(\"hyperparameters\"))\n            instance.__dict__.update(config)\n\n        instance.network_weights = weights\n        instance.network_weights_loader = weights_loader\n        instance.prediction_cache = weakref.WeakKeyDictionary()\n        return instance\n\n    def load_weights(self):\n        \"\"\"\n        Load weights by evaluating self.network_weights_loader, if needed.\n        \"\"\"\n        if self.network_weights_loader:\n            self.network_weights = self.network_weights_loader()\n            self.network_weights_loader = None\n\n    def get_weights(self):\n        \"\"\"\n        Get the network weights\n\n        Returns\n        -------\n        list of numpy.array giving weights for each layer or None if there is no\n        network\n        \"\"\"\n        self.update_network_description()\n        self.load_weights()\n        return self.network_weights\n\n    def get_weights_list(self):\n        \"\"\"\n        Get the network weights as a list of numpy arrays.\n\n        Returns\n        -------\n        list of numpy.array giving weights for each layer or None if there is no\n        network\n        \"\"\"\n        return self.get_weights()\n\n    def set_weights_list(self, weights, auto_convert_keras=True):\n        \"\"\"\n        Set the network weights from a list of numpy arrays.\n\n        If a network exists, the weights are set directly on it.\n        Otherwise, the weights are stored and will be applied when the\n        network is created.\n\n        Parameters\n        ----------\n        weights : list of 
numpy.array\n            Weights for each layer\n        auto_convert_keras : bool\n            If True, attempt to auto-detect and convert Keras weight formats\n            to PyTorch format. Default True.\n        \"\"\"\n        if self._network is not None:\n            # Network exists, set weights directly\n            self._network.set_weights_list(weights, auto_convert_keras=auto_convert_keras)\n        else:\n            # Store weights for later application\n            self.network_weights = weights\n            # Store flag for auto-conversion\n            self._auto_convert_keras_weights = auto_convert_keras\n\n    def __getstate__(self):\n        \"\"\"\n        serialize to a dict. Model weights are included. For pickle support.\n\n        Returns\n        -------\n        dict\n\n        \"\"\"\n        self.update_network_description()\n        self.load_weights()\n        result = dict(self.__dict__)\n        result[\"_network\"] = None\n        result[\"prediction_cache\"] = None\n        return result\n\n    def __setstate__(self, state):\n        \"\"\"\n        Deserialize. For pickle support.\n        \"\"\"\n        self.__dict__.update(state)\n        self.prediction_cache = weakref.WeakKeyDictionary()\n\n    def peptides_to_network_input(self, peptides):\n        \"\"\"\n        Encode peptides to the fixed-length encoding expected by the neural\n        network (which depends on the architecture).\n\n        Parameters\n        ----------\n        peptides : EncodableSequences or list of string\n\n        Returns\n        -------\n        numpy.array\n        \"\"\"\n        encoder = EncodableSequences.create(peptides)\n        encoded = encoder.variable_length_to_fixed_length_vector_encoding(\n            **self.hyperparameters[\"peptide_encoding\"]\n        )\n        assert len(encoded) == len(peptides)\n        return encoded\n\n    @property\n    def supported_peptide_lengths(self):\n        \"\"\"\n        (minimum, maximum) lengths of peptides supported, inclusive.\n\n        Returns\n        -------\n        (int, int) tuple\n\n        \"\"\"\n        try:\n            self.peptides_to_network_input([\"\"])\n        except EncodingError as e:\n            return e.supported_peptide_lengths\n        raise RuntimeError(\"peptides_to_network_input did not raise\")\n\n    def allele_encoding_to_network_input(self, allele_encoding):\n        \"\"\"\n        Encode alleles to the fixed-length encoding expected by the neural\n        network (which depends on the architecture).\n\n        Parameters\n        ----------\n        allele_encoding : AlleleEncoding\n\n        Returns\n        -------\n        (numpy.array, numpy.array)\n\n        Indices and allele representations.\n\n        \"\"\"\n        return (\n            allele_encoding.indices.values,\n            allele_encoding.allele_representations(\n                self.hyperparameters[\"allele_amino_acid_encoding\"]\n            ),\n        )\n\n    @staticmethod\n    def data_dependent_weights_initialization(network, x_dict=None, method=\"lsuv\", verbose=1):\n        \"\"\"\n        Data dependent weights initialization.\n\n        Parameters\n        ----------\n        network : Class1NeuralNetworkModel\n        x_dict : dict of string -> numpy.ndarray\n            Training data\n        method : string\n            Initialization method. 
Currently only \"lsuv\" is supported.\n        verbose : int\n            Status updates printed to stdout if verbose > 0\n        \"\"\"\n        if verbose:\n            print(\"Performing data-dependent init: \", method)\n        if method == \"lsuv\":\n            assert x_dict is not None, \"Data required for LSUV init\"\n            lsuv_init(network, x_dict, verbose=verbose > 0)\n        else:\n            raise RuntimeError(\"Unsupported init method: \", method)\n\n    @staticmethod\n    def _regularized_parameters(network):\n        \"\"\"\n        Parameters subject to master-branch dense kernel regularization.\n        \"\"\"\n        for name, param in network.named_parameters():\n            if not param.requires_grad or not name.endswith(\"weight\"):\n                continue\n            if any(part in name for part in (\n                    \"peptide_dense_layers\",\n                    \"allele_dense_layers\",\n                    \"dense_layers\")):\n                yield param\n\n    @staticmethod\n    def _regularization_penalty(parameters, l1=0.0, l2=0.0):\n        \"\"\"\n        Match Keras kernel_regularizer semantics used on dense kernels.\n        \"\"\"\n        parameters = tuple(parameters)\n        if not parameters or (not l1 and not l2):\n            return None\n        penalty = torch.zeros((), device=parameters[0].device)\n        for param in parameters:\n            if l1:\n                penalty = penalty + (l1 * param.abs().sum())\n            if l2:\n                penalty = penalty + (l2 * param.square().sum())\n        return penalty\n\n    def get_device(self):\n        \"\"\"Get the PyTorch device to use.\"\"\"\n        return get_pytorch_device()\n\n    def fit_generator(\n            self,\n            generator,\n            validation_peptide_encoding,\n            validation_affinities,\n            validation_allele_encoding=None,\n            validation_inequalities=None,\n            validation_output_indices=None,\n            steps_per_epoch=10,\n            epochs=1000,\n            min_epochs=0,\n            patience=10,\n            min_delta=0.0,\n            verbose=1,\n            progress_callback=None,\n            progress_preamble=\"\",\n            progress_print_interval=5.0):\n        \"\"\"\n        Fit using a generator. 
Does not support many of the features of fit(),\n        such as random negative peptides.\n        \"\"\"\n        device = self.get_device()\n\n        fit_info = collections.defaultdict(list)\n\n        loss_obj = get_pytorch_loss(self.hyperparameters[\"loss\"])\n\n        (\n            validation_allele_input,\n            allele_representations,\n        ) = self.allele_encoding_to_network_input(validation_allele_encoding)\n\n        if self.network() is None:\n            self._network = self.make_network(\n                allele_representations=allele_representations,\n                **self.network_hyperparameter_defaults.subselect(self.hyperparameters)\n            )\n            if verbose > 0:\n                print(self.network())\n        network = self.network()\n        network.to(device)\n\n        self.set_allele_representations(allele_representations)\n\n        # Setup optimizer\n        optimizer = self._create_optimizer(network)\n        if self.hyperparameters[\"learning_rate\"] is not None:\n            for param_group in optimizer.param_groups:\n                param_group['lr'] = self.hyperparameters[\"learning_rate\"]\n        fit_info[\"learning_rate\"] = optimizer.param_groups[0]['lr']\n        regularization_parameters = tuple(self._regularized_parameters(network))\n        l1_reg = self.hyperparameters[\"dense_layer_l1_regularization\"]\n        l2_reg = self.hyperparameters[\"dense_layer_l2_regularization\"]\n\n        # Prepare validation data\n        validation_x_dict = {\n            \"peptide\": self.peptides_to_network_input(validation_peptide_encoding),\n            \"allele\": validation_allele_input,\n        }\n        encode_y_kwargs = {}\n        if validation_inequalities is not None:\n            encode_y_kwargs[\"inequalities\"] = validation_inequalities\n        if validation_output_indices is not None:\n            encode_y_kwargs[\"output_indices\"] = validation_output_indices\n\n        output = loss_obj.encode_y(from_ic50(validation_affinities), **encode_y_kwargs)\n\n        mutable_generator_state = {\n            \"yielded_values\": 0\n        }\n\n        def wrapped_generator():\n            for alleles, peptides, affinities in generator:\n                (allele_encoding_input, _) = self.allele_encoding_to_network_input(\n                    alleles\n                )\n                x_dict = {\n                    \"peptide\": self.peptides_to_network_input(peptides),\n                    \"allele\": allele_encoding_input,\n                }\n                y = from_ic50(affinities)\n                yield (x_dict, y)\n                mutable_generator_state[\"yielded_values\"] += len(affinities)\n\n        start = time.time()\n        iterator = wrapped_generator()\n\n        # Data dependent init\n        data_dependent_init = self.hyperparameters[\n            \"data_dependent_initialization_method\"\n        ]\n        if data_dependent_init and not self.fit_info:\n            first_chunk = next(iterator)\n            self.data_dependent_weights_initialization(\n                network,\n                first_chunk[0],\n                method=data_dependent_init,\n                verbose=verbose,\n            )\n            iterator = itertools.chain([first_chunk], iterator)\n\n        min_val_loss_iteration = None\n        min_val_loss = None\n        last_progress_print = 0\n        epoch = 1\n\n        while True:\n            epoch_start_time = time.time()\n            network.train()\n\n            epoch_losses = []\n      
      for step in range(steps_per_epoch):\n                try:\n                    x_dict, y = next(iterator)\n                except StopIteration:\n                    break\n\n                # Convert to tensors\n                peptide_tensor = torch.from_numpy(x_dict[\"peptide\"]).float().to(device)\n                allele_tensor = torch.from_numpy(x_dict[\"allele\"]).float().to(device)\n                y_tensor = torch.from_numpy(y.astype(numpy.float32)).to(device)\n\n                optimizer.zero_grad()\n                inputs = {\"peptide\": peptide_tensor, \"allele\": allele_tensor}\n                predictions = network(inputs)\n                loss = loss_obj(predictions, y_tensor)\n                regularization_penalty = self._regularization_penalty(\n                    regularization_parameters,\n                    l1=l1_reg,\n                    l2=l2_reg,\n                )\n                if regularization_penalty is not None:\n                    loss = loss + regularization_penalty\n                loss.backward()\n                optimizer.step()\n                epoch_losses.append(loss.item())\n\n            # Compute validation loss\n            network.eval()\n            with torch.no_grad():\n                val_peptide = torch.from_numpy(validation_x_dict[\"peptide\"]).float().to(device)\n                val_allele = torch.from_numpy(validation_x_dict[\"allele\"]).float().to(device)\n                val_y = torch.from_numpy(output.astype(numpy.float32)).to(device)\n\n                val_inputs = {\"peptide\": val_peptide, \"allele\": val_allele}\n                val_predictions = network(val_inputs)\n                val_loss = loss_obj(val_predictions, val_y)\n                regularization_penalty = self._regularization_penalty(\n                    regularization_parameters,\n                    l1=l1_reg,\n                    l2=l2_reg,\n                )\n                if regularization_penalty is not None:\n                    val_loss = val_loss + regularization_penalty\n                val_loss = val_loss.item()\n\n            epoch_time = time.time() - epoch_start_time\n            train_loss = numpy.mean(epoch_losses) if epoch_losses else float('nan')\n            fit_info[\"loss\"].append(train_loss)\n            fit_info[\"val_loss\"].append(val_loss)\n\n            if min_val_loss is None or val_loss < min_val_loss - min_delta:\n                min_val_loss = val_loss\n                min_val_loss_iteration = epoch\n\n            patience_epoch_threshold = min(\n                epochs, max(min_val_loss_iteration + patience, min_epochs)\n            )\n\n            progress_message = (\n                \"epoch %3d/%3d [%0.2f sec.]: loss=%g val_loss=%g. Min val \"\n                \"loss %g at epoch %s. Cum. points: %d. 
Stop at epoch %d.\"\n                % (\n                    epoch,\n                    epochs,\n                    epoch_time,\n                    train_loss,\n                    val_loss,\n                    min_val_loss,\n                    min_val_loss_iteration,\n                    mutable_generator_state[\"yielded_values\"],\n                    patience_epoch_threshold,\n                )\n            ).strip()\n\n            if progress_print_interval is not None and (\n                time.time() - last_progress_print > progress_print_interval\n            ):\n                print(progress_preamble, progress_message)\n                last_progress_print = time.time()\n\n            if progress_callback:\n                progress_callback()\n\n            if epoch >= patience_epoch_threshold:\n                if progress_print_interval is not None:\n                    print(progress_preamble, \"STOPPING\", progress_message)\n                break\n            epoch += 1\n\n        fit_info[\"time\"] = time.time() - start\n        fit_info[\"num_points\"] = mutable_generator_state[\"yielded_values\"]\n        self.fit_info.append(dict(fit_info))\n\n    def _create_optimizer(self, network):\n        \"\"\"Create an optimizer for the network.\"\"\"\n        optimizer_name = self.hyperparameters[\"optimizer\"].lower()\n        lr = (\n            self.hyperparameters[\"learning_rate\"]\n            if self.hyperparameters[\"learning_rate\"] is not None\n            else 0.001\n        )\n\n        if optimizer_name == \"rmsprop\":\n            # Match Keras defaults: rho=0.9, epsilon=1e-07\n            return torch.optim.RMSprop(\n                network.parameters(), lr=lr, alpha=0.9, eps=1e-07)\n        elif optimizer_name == \"adam\":\n            # Match Keras default epsilon=1e-07.\n            return torch.optim.Adam(network.parameters(), lr=lr, eps=1e-07)\n        elif optimizer_name == \"sgd\":\n            return torch.optim.SGD(network.parameters(), lr=lr)\n        else:\n            return torch.optim.Adam(network.parameters(), lr=lr, eps=1e-07)\n\n    def fit(\n            self,\n            peptides,\n            affinities,\n            allele_encoding=None,\n            inequalities=None,\n            output_indices=None,\n            sample_weights=None,\n            shuffle_permutation=None,\n            verbose=1,\n            progress_callback=None,\n            progress_preamble=\"\",\n            progress_print_interval=5.0):\n        \"\"\"\n        Fit the neural network.\n\n        Parameters\n        ----------\n        peptides : EncodableSequences or list of string\n        affinities : list of float\n            nM affinities. 
Must be same length as peptides.\n        allele_encoding : AlleleEncoding\n            If not specified, the model will be a single-allele predictor.\n        inequalities : list of string, each element one of \">\", \"<\", or \"=\".\n        output_indices : list of int\n            For multi-output models only.\n        sample_weights : list of float\n        shuffle_permutation : list of int\n        verbose : int\n        progress_callback : function\n        progress_preamble : string\n        progress_print_interval : float\n        \"\"\"\n        device = self.get_device()\n\n        encodable_peptides = EncodableSequences.create(peptides)\n        peptide_encoding = self.peptides_to_network_input(encodable_peptides)\n        fit_info = collections.defaultdict(list)\n\n        random_negatives_planner = RandomNegativePeptides(\n            **RandomNegativePeptides.hyperparameter_defaults.subselect(\n                self.hyperparameters\n            )\n        )\n        random_negatives_planner.plan(\n            peptides=encodable_peptides.sequences,\n            affinities=affinities,\n            alleles=allele_encoding.alleles if allele_encoding else None,\n            inequalities=inequalities,\n        )\n\n        random_negatives_allele_encoding = None\n        if allele_encoding is not None:\n            random_negatives_allele_encoding = AlleleEncoding(\n                random_negatives_planner.get_alleles(), borrow_from=allele_encoding\n            )\n        num_random_negatives = random_negatives_planner.get_total_count()\n\n        y_values = from_ic50(numpy.asarray(affinities))\n        assert numpy.isnan(y_values).sum() == 0, y_values\n\n        if inequalities is not None:\n            # from_ic50 reverses the direction of the targets (high affinity\n            # means low nM), so \">\" and \"<\" are swapped here on purpose.\n            adjusted_inequalities = (\n                pandas.Series(inequalities)\n                .map({\n                    \"=\": \"=\",\n                    \">\": \"<\",\n                    \"<\": \">\",\n                })\n                .values\n            )\n        else:\n            adjusted_inequalities = numpy.tile(\"=\", len(y_values))\n\n        if len(adjusted_inequalities) != len(y_values):\n            raise ValueError(\"Inequalities and y_values must have same length\")\n\n        x_dict_without_random_negatives = {\n            \"peptide\": peptide_encoding,\n        }\n        allele_representations = None\n        if allele_encoding is not None:\n            (\n                allele_encoding_input,\n                allele_representations,\n            ) = self.allele_encoding_to_network_input(allele_encoding)\n            x_dict_without_random_negatives[\"allele\"] = allele_encoding_input\n\n        # Shuffle\n        if shuffle_permutation is None:\n            shuffle_permutation = numpy.random.permutation(len(y_values))\n        y_values = y_values[shuffle_permutation]\n        peptide_encoding = peptide_encoding[shuffle_permutation]\n        adjusted_inequalities = adjusted_inequalities[shuffle_permutation]\n        for key in x_dict_without_random_negatives:\n            x_dict_without_random_negatives[key] = x_dict_without_random_negatives[key][\n                shuffle_permutation\n            ]\n        if sample_weights is not None:\n            sample_weights = numpy.array(sample_weights, copy=False)[shuffle_permutation]\n        if output_indices is not None:\n            output_indices = numpy.array(output_indices, copy=False)[shuffle_permutation]\n\n        loss_obj = get_pytorch_loss(self.hyperparameters[\"loss\"])\n\n        if not 
loss_obj.supports_inequalities and (\n            any(inequality != \"=\" for inequality in adjusted_inequalities)\n        ):\n            raise ValueError(\"Loss %s does not support inequalities\" % loss_obj)\n\n        if (\n            not loss_obj.supports_multiple_outputs\n            and output_indices is not None\n            and (output_indices != 0).any()\n        ):\n            raise ValueError(\"Loss %s does not support multiple outputs\" % loss_obj)\n\n        if self.hyperparameters[\"num_outputs\"] != 1:\n            if output_indices is None:\n                raise ValueError(\"Must supply output_indices for multi-output predictor\")\n\n        if self.network() is None:\n            self._network = self.make_network(\n                allele_representations=allele_representations,\n                **self.network_hyperparameter_defaults.subselect(self.hyperparameters)\n            )\n            if verbose > 0:\n                print(self.network())\n\n        network = self.network()\n        network.to(device)\n\n        if allele_representations is not None:\n            self.set_allele_representations(allele_representations)\n\n        optimizer = self._create_optimizer(network)\n        if self.hyperparameters[\"learning_rate\"] is not None:\n            for param_group in optimizer.param_groups:\n                param_group['lr'] = self.hyperparameters[\"learning_rate\"]\n        fit_info[\"learning_rate\"] = optimizer.param_groups[0]['lr']\n\n        # Prepare y values with random negatives\n        if loss_obj.supports_inequalities:\n            random_negative_ic50 = self.hyperparameters[\"random_negative_affinity_min\"]\n            random_negative_target = from_ic50(random_negative_ic50)\n\n            y_with_negatives = numpy.concatenate([\n                numpy.tile(random_negative_target, num_random_negatives),\n                y_values,\n            ])\n            adjusted_inequalities_with_random_negatives = (\n                [\"<\"] * num_random_negatives + list(adjusted_inequalities)\n            )\n        else:\n            y_with_negatives = numpy.concatenate([\n                from_ic50(\n                    numpy.random.uniform(\n                        self.hyperparameters[\"random_negative_affinity_min\"],\n                        self.hyperparameters[\"random_negative_affinity_max\"],\n                        num_random_negatives,\n                    )\n                ),\n                y_values,\n            ])\n            adjusted_inequalities_with_random_negatives = None\n\n        if sample_weights is not None:\n            sample_weights_with_negatives = numpy.concatenate([\n                numpy.ones(num_random_negatives),\n                sample_weights\n            ])\n        else:\n            sample_weights_with_negatives = None\n\n        if output_indices is not None:\n            random_negative_output_indices = (\n                self.hyperparameters[\"random_negative_output_indices\"]\n                if self.hyperparameters[\"random_negative_output_indices\"]\n                else list(range(0, self.hyperparameters[\"num_outputs\"]))\n            )\n            output_indices_with_negatives = numpy.concatenate([\n                pandas.Series(random_negative_output_indices, dtype=int)\n                .sample(n=num_random_negatives, replace=True)\n                .values,\n                output_indices,\n            ])\n        else:\n            output_indices_with_negatives = None\n\n        # Encode y\n        
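# Losses that support inequalities (e.g. MSEWithInequalities) fold the\n        # \"<\"/\"=\"/\">\" constraints and any output indices into the target array\n        # itself, so y_encoded is handled as a plain float array from here on.\n        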
encode_y_kwargs = {}\n        if adjusted_inequalities_with_random_negatives is not None:\n            encode_y_kwargs[\"inequalities\"] = adjusted_inequalities_with_random_negatives\n        if output_indices_with_negatives is not None:\n            encode_y_kwargs[\"output_indices\"] = output_indices_with_negatives\n\n        y_encoded = loss_obj.encode_y(y_with_negatives, **encode_y_kwargs)\n\n        min_val_loss_iteration = None\n        min_val_loss = None\n\n        needs_initialization = (\n            self.hyperparameters[\"data_dependent_initialization_method\"] is not None\n            and not self.fit_info\n        )\n\n        start = time.time()\n        last_progress_print = None\n\n        # Validation split (fixed across epochs; only training data is reshuffled)\n        val_split = self.hyperparameters[\"validation_split\"]\n        n_total = len(y_encoded)\n        n_val = int(n_total * val_split)\n        n_train = n_total - n_val\n        indices = numpy.arange(n_total)\n        if n_val > 0:\n            train_indices_base = indices[:n_train]\n            val_indices = indices[n_train:]\n        else:\n            train_indices_base = indices\n            val_indices = None\n\n        regularization_parameters = tuple(self._regularized_parameters(network))\n        l1_reg = self.hyperparameters[\"dense_layer_l1_regularization\"]\n        l2_reg = self.hyperparameters[\"dense_layer_l2_regularization\"]\n\n        for epoch in range(self.hyperparameters[\"max_epochs\"]):\n            random_negative_peptides = EncodableSequences.create(\n                random_negatives_planner.get_peptides()\n            )\n            random_negative_peptides_encoding = self.peptides_to_network_input(\n                random_negative_peptides\n            )\n\n            # Build x_dict with random negatives\n            if len(random_negative_peptides) > 0:\n                x_peptide = numpy.concatenate([\n                    random_negative_peptides_encoding,\n                    x_dict_without_random_negatives[\"peptide\"],\n                ])\n                if \"allele\" in x_dict_without_random_negatives:\n                    x_allele = numpy.concatenate([\n                        self.allele_encoding_to_network_input(\n                            random_negatives_allele_encoding\n                        )[0],\n                        x_dict_without_random_negatives[\"allele\"],\n                    ])\n                else:\n                    x_allele = None\n            else:\n                x_peptide = x_dict_without_random_negatives[\"peptide\"]\n                x_allele = x_dict_without_random_negatives.get(\"allele\")\n\n            if needs_initialization:\n                x_init = {\"peptide\": x_peptide}\n                if x_allele is not None:\n                    x_init[\"allele\"] = x_allele\n                self.data_dependent_weights_initialization(\n                    network,\n                    x_init,\n                    method=self.hyperparameters[\"data_dependent_initialization_method\"],\n                    verbose=verbose,\n                )\n                needs_initialization = False\n\n            # Train/val split (keep validation fixed)\n            train_indices = train_indices_base.copy()\n            numpy.random.shuffle(train_indices)\n\n            # Training\n            network.train()\n            epoch_start = time.time()\n\n            # Create batches\n            batch_size = self.hyperparameters[\"minibatch_size\"]\n            
train_losses = []\n\n            for batch_start in range(0, n_train, batch_size):\n                batch_idx = train_indices[batch_start:batch_start + batch_size]\n\n                peptide_batch = torch.from_numpy(x_peptide[batch_idx]).float().to(device)\n                y_batch = torch.from_numpy(y_encoded[batch_idx].astype(numpy.float32)).to(device)\n\n                inputs = {\"peptide\": peptide_batch}\n                if x_allele is not None:\n                    allele_batch = torch.from_numpy(x_allele[batch_idx]).float().to(device)\n                    inputs[\"allele\"] = allele_batch\n\n                optimizer.zero_grad()\n                predictions = network(inputs)\n                weights_batch = None\n                if sample_weights_with_negatives is not None:\n                    weights_batch = torch.from_numpy(\n                        sample_weights_with_negatives[batch_idx]\n                    ).float().to(device)\n                loss = loss_obj(predictions, y_batch, sample_weights=weights_batch)\n                regularization_penalty = self._regularization_penalty(\n                    regularization_parameters,\n                    l1=l1_reg,\n                    l2=l2_reg,\n                )\n                if regularization_penalty is not None:\n                    loss = loss + regularization_penalty\n                loss.backward()\n                optimizer.step()\n                train_losses.append(loss.item())\n\n            epoch_time = time.time() - epoch_start\n            train_loss = numpy.mean(train_losses)\n            fit_info[\"loss\"].append(train_loss)\n\n            # Validation\n            if val_split > 0:\n                network.eval()\n                with torch.no_grad():\n                    val_peptide = torch.from_numpy(x_peptide[val_indices]).float().to(device)\n                    val_y = torch.from_numpy(y_encoded[val_indices].astype(numpy.float32)).to(device)\n                    val_inputs = {\"peptide\": val_peptide}\n                    if x_allele is not None:\n                        val_allele = torch.from_numpy(x_allele[val_indices]).float().to(device)\n                        val_inputs[\"allele\"] = val_allele\n                    val_predictions = network(val_inputs)\n                    val_weights = None\n                    if sample_weights_with_negatives is not None:\n                        val_weights = torch.from_numpy(\n                            sample_weights_with_negatives[val_indices]\n                        ).float().to(device)\n                    val_loss = loss_obj(\n                        val_predictions,\n                        val_y,\n                        sample_weights=val_weights,\n                    )\n                    regularization_penalty = self._regularization_penalty(\n                        regularization_parameters,\n                        l1=l1_reg,\n                        l2=l2_reg,\n                    )\n                    if regularization_penalty is not None:\n                        val_loss = val_loss + regularization_penalty\n                    val_loss = val_loss.item()\n                fit_info[\"val_loss\"].append(val_loss)\n\n            # Progress printing\n            if progress_print_interval is not None and (\n                not last_progress_print\n                or (time.time() - last_progress_print > progress_print_interval)\n            ):\n                print(\n                    (\n                        progress_preamble\n                  
      + \" \"\n                        + \"Epoch %3d / %3d [%0.2f sec]: loss=%g. \"\n                        \"Min val loss (%s) at epoch %s\"\n                        % (\n                            epoch,\n                            self.hyperparameters[\"max_epochs\"],\n                            epoch_time,\n                            train_loss,\n                            str(min_val_loss),\n                            min_val_loss_iteration,\n                        )\n                    ).strip()\n                )\n                last_progress_print = time.time()\n\n            # Early stopping\n            if val_split > 0:\n                if min_val_loss is None or (\n                    val_loss < min_val_loss - self.hyperparameters[\"min_delta\"]\n                ):\n                    min_val_loss = val_loss\n                    min_val_loss_iteration = epoch\n\n                if self.hyperparameters[\"early_stopping\"]:\n                    threshold = min_val_loss_iteration + self.hyperparameters[\"patience\"]\n                    if epoch > threshold:\n                        if progress_print_interval is not None:\n                            print(\n                                (\n                                    progress_preamble\n                                    + \" \"\n                                    + \"Stopping at epoch %3d / %3d: loss=%g. \"\n                                    \"Min val loss (%g) at epoch %s\"\n                                    % (\n                                        epoch,\n                                        self.hyperparameters[\"max_epochs\"],\n                                        train_loss,\n                                        min_val_loss if min_val_loss is not None else numpy.nan,\n                                        min_val_loss_iteration,\n                                    )\n                                ).strip()\n                            )\n                        break\n\n            if progress_callback:\n                progress_callback()\n\n            gc.collect()\n\n        fit_info[\"time\"] = time.time() - start\n        fit_info[\"num_points\"] = len(peptides)\n        self.fit_info.append(dict(fit_info))\n\n    def predict(\n            self,\n            peptides,\n            allele_encoding=None,\n            batch_size=DEFAULT_PREDICT_BATCH_SIZE,\n            output_index=0):\n        \"\"\"\n        Predict affinities.\n\n        Parameters\n        ----------\n        peptides : EncodableSequences or list of string\n        allele_encoding : AlleleEncoding, optional\n        batch_size : int\n        output_index : int or None\n\n        Returns\n        -------\n        numpy.array of nM affinity predictions\n        \"\"\"\n        assert self.prediction_cache is not None\n        use_cache = allele_encoding is None and isinstance(peptides, EncodableSequences)\n        if use_cache and peptides in self.prediction_cache:\n            return self.prediction_cache[peptides].copy()\n\n        device = self.get_device()\n\n        x_dict = {\"peptide\": self.peptides_to_network_input(peptides)}\n\n        if allele_encoding is not None:\n            (\n                allele_encoding_input,\n                allele_representations,\n            ) = self.allele_encoding_to_network_input(allele_encoding)\n            x_dict[\"allele\"] = allele_encoding_input\n            self.set_allele_representations(allele_representations)\n            network = self.network()\n       
 else:\n            network = self.network(borrow=True)\n\n        network.to(device)\n        network.eval()\n\n        # Batch prediction\n        n_samples = len(x_dict[\"peptide\"])\n        all_predictions = []\n\n        def prediction_tensor(batch_array):\n            batch_array = numpy.asarray(batch_array, dtype=numpy.float32)\n            if not batch_array.flags.writeable:\n                batch_array = batch_array.copy()\n            return torch.from_numpy(batch_array).to(device)\n\n        with torch.no_grad():\n            for batch_start in range(0, n_samples, batch_size):\n                batch_end = min(batch_start + batch_size, n_samples)\n\n                peptide_batch = prediction_tensor(\n                    x_dict[\"peptide\"][batch_start:batch_end]\n                )\n\n                inputs = {\"peptide\": peptide_batch}\n                if \"allele\" in x_dict:\n                    allele_batch = prediction_tensor(\n                        x_dict[\"allele\"][batch_start:batch_end]\n                    )\n                    inputs[\"allele\"] = allele_batch\n\n                batch_predictions = network(inputs)\n                all_predictions.append(batch_predictions.cpu().numpy())\n\n        raw_predictions = numpy.concatenate(all_predictions, axis=0)\n        predictions = numpy.array(raw_predictions, dtype=\"float64\")\n\n        if output_index is not None:\n            predictions = predictions[:, output_index]\n\n        result = to_ic50(predictions)\n        if use_cache:\n            self.prediction_cache[peptides] = result\n        return result\n\n    @classmethod\n    def merge(cls, models, merge_method=\"average\"):\n        \"\"\"\n        Merge multiple models at the neural network level.\n\n        Parameters\n        ----------\n        models : list of Class1NeuralNetwork\n        merge_method : string, one of \"average\", \"sum\", or \"concatenate\"\n\n        Returns\n        -------\n        Class1NeuralNetwork\n        \"\"\"\n        if merge_method == \"allele-specific\":\n            raise NotImplementedError(\"Allele-specific merge is not implemented\")\n        if len(models) == 1:\n            return models[0]\n        assert len(models) > 1\n        if any(not model.network().has_allele for model in models):\n            raise NotImplementedError(\"Merging allele-specific models is not implemented\")\n\n        # For now, we create a simple ensemble wrapper\n        # that averages predictions\n        result = Class1NeuralNetwork(**dict(models[0].hyperparameters))\n\n        # Remove hyperparameters not shared by all models\n        for model in models:\n            for key, value in model.hyperparameters.items():\n                if result.hyperparameters.get(key, value) != value:\n                    del result.hyperparameters[key]\n\n        # Create merged network\n        result._network = MergedClass1NeuralNetwork(\n            [model.network() for model in models],\n            merge_method=merge_method\n        )\n        result.update_network_description()\n\n        return result\n\n    def make_network(\n            self,\n            peptide_encoding,\n            allele_amino_acid_encoding,\n            allele_dense_layer_sizes,\n            peptide_dense_layer_sizes,\n            peptide_allele_merge_method,\n            peptide_allele_merge_activation,\n            layer_sizes,\n            dense_layer_l1_regularization,\n            dense_layer_l2_regularization,\n            activation,\n            init,\n          
  output_activation,\n            dropout_probability,\n            batch_normalization,\n            locally_connected_layers,\n            topology,\n            num_outputs=1,\n            allele_representations=None):\n        \"\"\"\n        Helper function to make a PyTorch network for class 1 affinity prediction.\n        \"\"\"\n        peptide_encoding_shape = self.peptides_to_network_input([]).shape[1:]\n\n        return Class1NeuralNetworkModel(\n            peptide_encoding_shape=peptide_encoding_shape,\n            allele_representations=allele_representations,\n            locally_connected_layers=locally_connected_layers,\n            peptide_dense_layer_sizes=peptide_dense_layer_sizes,\n            allele_dense_layer_sizes=allele_dense_layer_sizes,\n            layer_sizes=layer_sizes,\n            peptide_allele_merge_method=peptide_allele_merge_method,\n            peptide_allele_merge_activation=peptide_allele_merge_activation,\n            activation=activation,\n            output_activation=output_activation,\n            dropout_probability=dropout_probability,\n            batch_normalization=batch_normalization,\n            dense_layer_l1_regularization=dense_layer_l1_regularization,\n            dense_layer_l2_regularization=dense_layer_l2_regularization,\n            topology=topology,\n            num_outputs=num_outputs,\n            init=init,\n        )\n\n    def clear_allele_representations(self):\n        \"\"\"\n        Set allele representations to an empty array.\n        \"\"\"\n        original_model = self.network()\n        if original_model is not None and original_model.allele_embedding is not None:\n            existing_shape = original_model.allele_embedding.weight.shape\n            new_weight = numpy.zeros(\n                shape=(1, existing_shape[1]),\n                dtype=numpy.float32\n            )\n            target = original_model.allele_embedding.weight\n            original_model.allele_embedding.weight.data = torch.from_numpy(\n                new_weight\n            ).to(device=target.device, dtype=target.dtype)\n            original_model.allele_embedding.weight.requires_grad = False\n\n    def set_allele_representations(self, allele_representations, force_surgery=False):\n        \"\"\"\n        Set the allele representations in use by this model.\n\n        Parameters\n        ----------\n        allele_representations : numpy.ndarray of shape (a, l, m)\n        force_surgery : bool\n        \"\"\"\n        network = self.network()\n        if network is None:\n            return\n\n        reshaped = allele_representations.reshape(\n            (\n                allele_representations.shape[0],\n                numpy.prod(allele_representations.shape[1:]),\n            )\n        ).astype(numpy.float32)\n\n        # Handle merged networks (ensembles)\n        if isinstance(network, MergedClass1NeuralNetwork):\n            for sub_network in network.networks:\n                self._update_embedding(sub_network, reshaped, force_surgery)\n        elif hasattr(network, 'allele_embedding') and network.allele_embedding is not None:\n            self._update_embedding(network, reshaped, force_surgery)\n\n    def _update_embedding(self, network, reshaped, force_surgery):\n        \"\"\"Update the allele embedding for a single network.\"\"\"\n        if network.allele_embedding is None:\n            return\n\n        target_weight = network.allele_embedding.weight\n        existing_shape = target_weight.shape\n        target_device 
= target_weight.device\n        target_dtype = target_weight.dtype\n\n        if existing_shape[0] > reshaped.shape[0] and not force_surgery:\n            # Extend with NaNs\n            reshaped = numpy.append(\n                reshaped,\n                numpy.ones([existing_shape[0] - reshaped.shape[0], reshaped.shape[1]])\n                * numpy.nan,\n                axis=0,\n            )\n\n        if existing_shape != reshaped.shape:\n            # Need to resize embedding\n            new_embedding = nn.Embedding(\n                num_embeddings=reshaped.shape[0],\n                embedding_dim=reshaped.shape[1]\n            ).to(device=target_device)\n            new_embedding.weight.data = torch.from_numpy(reshaped).to(\n                device=target_device,\n                dtype=target_dtype,\n            )\n            new_embedding.weight.requires_grad = False\n            network.allele_embedding = new_embedding\n        else:\n            network.allele_embedding.weight.data = torch.from_numpy(\n                reshaped\n            ).to(device=target_device, dtype=target_dtype)\n            network.allele_embedding.weight.requires_grad = False\n\n\nclass MergedClass1NeuralNetwork(nn.Module):\n    \"\"\"\n    A merged ensemble of Class1NeuralNetworkModel instances.\n    \"\"\"\n\n    def __init__(self, networks, merge_method=\"average\"):\n        super(MergedClass1NeuralNetwork, self).__init__()\n        self.networks = nn.ModuleList(networks)\n        self.merge_method = merge_method\n\n    def forward(self, inputs):\n        outputs = [network(inputs) for network in self.networks]\n        stacked = torch.stack(outputs, dim=-1)\n\n        if self.merge_method == \"average\":\n            return stacked.mean(dim=-1)\n        elif self.merge_method == \"sum\":\n            return stacked.sum(dim=-1)\n        elif self.merge_method == \"concatenate\":\n            return torch.cat(outputs, dim=-1)\n        else:\n            raise ValueError(f\"Unknown merge method: {self.merge_method}\")\n\n    def get_weights_list(self):\n        \"\"\"Get all weights as a flat list.\"\"\"\n        weights = []\n        for network in self.networks:\n            weights.extend(network.get_weights_list())\n        return weights\n\n    def set_weights_list(self, weights, auto_convert_keras=False):\n        \"\"\"Set weights from a flat list.\"\"\"\n        idx = 0\n        for network in self.networks:\n            n_weights = len(list(network.parameters())) + len(list(network.buffers()))\n            network.set_weights_list(weights[idx:idx + n_weights], auto_convert_keras=auto_convert_keras)\n            idx += n_weights\n"
  },
  {
    "path": "mhcflurry/class1_presentation_predictor.py",
    "content": "from os.path import join, exists\nfrom os import mkdir\nfrom socket import gethostname\nfrom getpass import getuser\n\nimport time\nimport collections\nimport logging\nimport warnings\nimport numpy\nimport pandas\nimport sklearn\nimport sklearn.linear_model\n\n\ntry:\n    import tqdm\nexcept ImportError:\n    tdqm = None\n\nfrom .version import __version__\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .class1_processing_predictor import Class1ProcessingPredictor\nfrom .class1_neural_network import DEFAULT_PREDICT_BATCH_SIZE\nfrom .encodable_sequences import EncodableSequences\nfrom .regression_target import from_ic50\nfrom .downloads import get_default_class1_presentation_models_dir\nfrom .percent_rank_transform import PercentRankTransform\n\n\nMAX_ALLELES_PER_SAMPLE = 6\nPREDICT_BATCH_SIZE = DEFAULT_PREDICT_BATCH_SIZE\nPREDICT_CHUNK_SIZE = 100000  # currently used only for cleavage prediction\n\n\nclass Class1PresentationPredictor(object):\n    \"\"\"\n    A logistic regression model over predicted binding affinity (BA) and antigen\n    processing (AP) score.\n\n    Instances of this class delegate to Class1AffinityPredictor and\n    Class1ProcessingPredictor instances to generate BA and AP predictions.\n    These predictions are combined using a logistic regression model to give\n    a \"presentation score\" prediction.\n\n    Most users will call the `load` static method to get an instance of this\n    class, then call the `predict` method to generate predictions.\n    \"\"\"\n    model_inputs = [\"affinity_score\", \"processing_score\"]\n\n    def __init__(\n            self,\n            affinity_predictor=None,\n            processing_predictor_with_flanks=None,\n            processing_predictor_without_flanks=None,\n            weights_dataframe=None,\n            metadata_dataframes=None,\n            percent_rank_transform=None,\n            provenance_string=None):\n\n        self.affinity_predictor = affinity_predictor\n        self.processing_predictor_with_flanks = processing_predictor_with_flanks\n        self.processing_predictor_without_flanks = processing_predictor_without_flanks\n        self.weights_dataframe = weights_dataframe\n        self.metadata_dataframes = (\n            dict(metadata_dataframes) if metadata_dataframes else {})\n        self._models_cache = {}\n        self.percent_rank_transform = percent_rank_transform\n        self.provenance_string = provenance_string\n\n    @property\n    def supported_alleles(self):\n        \"\"\"\n        List of alleles supported by the underlying Class1AffinityPredictor\n        \"\"\"\n        return self.affinity_predictor.supported_alleles\n\n    @property\n    def supported_peptide_lengths(self):\n        \"\"\"\n        (min, max) of supported peptide lengths, inclusive.\n        \"\"\"\n        return self.affinity_predictor.supported_peptide_lengths\n\n    @property\n    def supports_affinity_prediction(self):\n        \"\"\"Is there an affinity predictor associated with this instance?\"\"\"\n        return self.affinity_predictor is not None\n\n    @property\n    def supports_processing_prediction(self):\n        \"\"\"Is there a processing predictor associated with this instance?\"\"\"\n        return (\n            self.processing_predictor_with_flanks is not None or\n            self.processing_predictor_without_flanks is not None)\n\n    @property\n    def supports_presentation_prediction(self):\n        \"\"\"Can this instance predict presentation?\"\"\"\n        
return (\n            self.supports_affinity_prediction and\n            self.supports_processing_prediction and\n            self.weights_dataframe is not None)\n\n    def predict_affinity(\n            self,\n            peptides,\n            alleles,\n            sample_names=None,\n            include_affinity_percentile=True,\n            verbose=1,\n            throw=True):\n        \"\"\"\n        Predict binding affinities across samples (each corresponding to up to\n        six MHC I alleles).\n\n        Two modes are supported: each peptide can be evaluated for binding to\n        any of the alleles in any sample (this is what happens when sample_names\n        is None), or the i'th peptide can be evaluated for binding the alleles\n        of the sample given by the i'th entry in sample_names.\n\n        For example, if we don't specify sample_names, then predictions\n        are taken for all combinations of samples and peptides, for a result\n        size of num peptides * num samples:\n\n        >>> predictor = Class1PresentationPredictor.load()\n        >>> predictor.predict_affinity(\n        ...    peptides=[\"SIINFEKL\", \"PEPTIDE\"],\n        ...    alleles={\n        ...        \"sample1\": [\"A0201\", \"A0301\", \"B0702\"],\n        ...        \"sample2\": [\"A0101\", \"C0202\"],\n        ...    },\n        ...    verbose=0)\n            peptide  peptide_num sample_name   affinity best_allele  affinity_percentile\n        0  SIINFEKL            0     sample1  11927.161       A0201                6.296\n        1   PEPTIDE            1     sample1  32507.082       A0201               71.249\n        2  SIINFEKL            0     sample2   2725.593       C0202                6.662\n        3   PEPTIDE            1     sample2  28304.336       C0202               54.652\n\n        In contrast, here we specify sample_names, so each peptide is evaluated for\n        binding the alleles in the corresponding sample, for a result size equal\n        to the number of peptides:\n\n        >>> predictor.predict_affinity(\n        ...    peptides=[\"SIINFEKL\", \"PEPTIDE\"],\n        ...    alleles={\n        ...        \"sample1\": [\"A0201\", \"A0301\", \"B0702\"],\n        ...        \"sample2\": [\"A0101\", \"C0202\"],\n        ...    },\n        ...    sample_names=[\"sample2\", \"sample1\"],\n        ...    verbose=0)\n            peptide  peptide_num sample_name   affinity best_allele  affinity_percentile\n        0  SIINFEKL            0     sample2   2725.592       C0202                6.662\n        1   PEPTIDE            1     sample1  32507.078       A0201               71.249\n\n        Parameters\n        ----------\n        peptides : list of string\n            Peptide sequences\n        alleles : dict of string -> list of string\n            Keys are sample names, values are the alleles (genotype) for\n            that sample\n        sample_names : list of string [same length as peptides]\n            Sample names corresponding to each peptide. If None, then\n            predictions are generated for all sample genotypes across all\n            peptides.\n        include_affinity_percentile : bool\n            Whether to include affinity percentile ranks\n        verbose : int\n            Set to 0 for quiet.\n        throw : boolean\n            Whether to throw exception (vs. 
just log a warning) on invalid\n            peptides, etc.\n\n        Returns\n        -------\n        pandas.DataFrame : predictions\n        \"\"\"\n        df = pandas.DataFrame({\n            \"peptide\": numpy.asarray(peptides),\n        })\n        df[\"peptide_num\"] = df.index\n        if sample_names is None:\n            peptides = EncodableSequences.create(peptides)\n            all_alleles = set()\n            for lst in alleles.values():\n                all_alleles.update(lst)\n\n            iterator = sorted(all_alleles)\n\n            if verbose > 0:\n                print(\"Predicting affinities.\")\n                if tqdm is not None:\n                    iterator = tqdm.tqdm(iterator, total=len(all_alleles))\n\n            predictions_df = pandas.DataFrame(index=df.index)\n            for allele in iterator:\n                predictions_df[allele] = self.affinity_predictor.predict(\n                    peptides=peptides,\n                    allele=allele,\n                    model_kwargs={'batch_size': PREDICT_BATCH_SIZE},\n                    throw=throw)\n\n            dfs = []\n            for (sample_name, sample_alleles) in alleles.items():\n                new_df = df.copy()\n                new_df[\"sample_name\"] = sample_name\n                new_df[\"affinity\"] = predictions_df[\n                    sample_alleles\n                ].min(axis=1).values\n                if len(df) == 0:\n                    new_df[\"best_allele\"] = []\n                else:\n                    sample_predictions = predictions_df[sample_alleles]\n                    best_allele = pandas.Series(index=sample_predictions.index, dtype=object)\n                    valid = sample_predictions.notna().any(axis=1)\n                    if valid.any():\n                        best_allele.loc[valid] = sample_predictions.loc[valid].idxmin(axis=1)\n                    new_df[\"best_allele\"] = best_allele.values\n                dfs.append(new_df)\n\n            result_df = pandas.concat(dfs, ignore_index=True)\n        else:\n            df[\"sample_name\"] = numpy.asarray(sample_names)\n\n            iterator = df.groupby(\"sample_name\")\n            if verbose > 0:\n                print(\"Predicting affinities.\")\n                if tqdm is not None:\n                    iterator = tqdm.tqdm(\n                        iterator, total=df.sample_name.nunique())\n\n            for (sample, sub_df) in iterator:\n                predictions_df = pandas.DataFrame(index=sub_df.index)\n                sample_peptides = EncodableSequences.create(sub_df.peptide.values)\n                for allele in alleles[sample]:\n                    predictions_df[allele] = self.affinity_predictor.predict(\n                        peptides=sample_peptides,\n                        allele=allele,\n                        model_kwargs={'batch_size': PREDICT_BATCH_SIZE},\n                        throw=throw)\n                df.loc[\n                    sub_df.index, \"affinity\"\n                ] = predictions_df.min(axis=1).values\n                best_allele = pandas.Series(index=predictions_df.index, dtype=object)\n                valid = predictions_df.notna().any(axis=1)\n                if valid.any():\n                    best_allele.loc[valid] = predictions_df.loc[valid].idxmin(axis=1)\n                df.loc[\n                    sub_df.index, \"best_allele\"\n                ] = best_allele.values\n\n            result_df = df\n\n        if include_affinity_percentile:\n            
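# Percentile ranks are looked up against the best allele per\n            # peptide; throw=False tolerates alleles lacking percentile data.\n            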
result_df[\"affinity_percentile\"] = (\n                self.affinity_predictor.percentile_ranks(\n                    result_df.affinity.values,\n                    alleles=result_df.best_allele.values,\n                    throw=False))\n\n        return result_df\n\n    def predict_processing(\n            self, peptides, n_flanks=None, c_flanks=None, throw=True, verbose=1):\n        \"\"\"\n        Predict antigen processing scores for individual peptides, optionally\n        including flanking sequences for better cleavage prediction.\n\n        Parameters\n        ----------\n        peptides : list of string\n        n_flanks : list of string [same length as peptides]\n        c_flanks : list of string [same length as peptides]\n        throw : boolean\n            Whether to raise exception on unsupported peptides\n        verbose  : int\n\n        Returns\n        -------\n        numpy.array : Antigen processing scores for each peptide\n        \"\"\"\n\n        if (n_flanks is None) != (c_flanks is None):\n            raise ValueError(\"Specify both or neither of n_flanks, c_flanks\")\n\n        if n_flanks is None:\n            if self.processing_predictor_without_flanks is None:\n                raise ValueError(\"No processing predictor without flanks\")\n            predictor = self.processing_predictor_without_flanks\n            n_flanks = [\"\"] * len(peptides)\n            c_flanks = n_flanks\n        else:\n            if self.processing_predictor_with_flanks is None:\n                raise ValueError(\"No processing predictor with flanks\")\n            predictor = self.processing_predictor_with_flanks\n\n        if len(peptides) == 0:\n            return numpy.array([], dtype=float)\n\n        num_chunks = int(numpy.ceil(float(len(peptides)) / PREDICT_CHUNK_SIZE))\n        peptide_chunks = numpy.array_split(peptides, num_chunks)\n        n_flank_chunks = numpy.array_split(n_flanks, num_chunks)\n        c_flank_chunks = numpy.array_split(c_flanks, num_chunks)\n\n        iterator = zip(peptide_chunks, n_flank_chunks, c_flank_chunks)\n        if verbose > 0:\n            print(\"Predicting processing.\")\n            if tqdm is not None:\n                iterator = tqdm.tqdm(iterator, total=len(peptide_chunks))\n\n        result_chunks = []\n        for (peptide_chunk, n_flank_chunk, c_flank_chunk) in iterator:\n            result_chunk = predictor.predict(\n                peptides=peptide_chunk,\n                n_flanks=n_flank_chunk,\n                c_flanks=c_flank_chunk,\n                throw=throw,\n                batch_size=PREDICT_BATCH_SIZE)\n            result_chunks.append(result_chunk)\n        return numpy.concatenate(result_chunks)\n\n    def fit(\n            self,\n            targets,\n            peptides,\n            sample_names,\n            alleles,\n            n_flanks=None,\n            c_flanks=None,\n            verbose=1):\n        \"\"\"\n        Fit the presentation score logistic regression model.\n\n        Parameters\n        ----------\n        targets : list of int/float\n            1 indicates hit, 0 indicates decoy\n        peptides : list of string [same length as targets]\n        sample_names : list of string [same length as targets]\n        alleles : dict of string -> list of string\n            Keys are sample names, values are the alleles for that sample\n        n_flanks : list of string [same length as targets]\n        c_flanks : list of string [same length as targets]\n        verbose : int\n        \"\"\"\n\n        
df = self.predict_affinity(\n            peptides=peptides,\n            alleles=alleles,\n            sample_names=sample_names,\n            verbose=verbose)\n        df[\"affinity_score\"] = from_ic50(df.affinity)\n        df[\"target\"] = numpy.asarray(targets)\n\n        if (n_flanks is None) != (c_flanks is None):\n            raise ValueError(\"Specify both or neither of n_flanks, c_flanks\")\n\n        with_flanks_list = []\n        if self.processing_predictor_without_flanks is not None:\n            with_flanks_list.append(False)\n\n        if n_flanks is not None and self.processing_predictor_with_flanks is not None:\n            with_flanks_list.append(True)\n\n        if not with_flanks_list:\n            raise RuntimeError(\"Can't fit any models\")\n\n        if self.weights_dataframe is None:\n            self.weights_dataframe = pandas.DataFrame()\n\n        for with_flanks in with_flanks_list:\n            model_name = 'with_flanks' if with_flanks else \"without_flanks\"\n            if verbose > 0:\n                print(\"Training variant\", model_name)\n\n            df[\"processing_score\"] = self.predict_processing(\n                peptides=df.peptide.values,\n                n_flanks=n_flanks if with_flanks else None,\n                c_flanks=c_flanks if with_flanks else None,\n                verbose=verbose)\n\n            model = self.get_model()\n            if verbose > 0:\n                print(\"Fitting LR model.\")\n                print(df)\n\n            model.fit(\n                X=df[self.model_inputs].values,\n                y=df.target.astype(float))\n\n            (intercept,) = model.intercept_.flatten()\n            self.weights_dataframe.loc[model_name, \"intercept\"] = intercept\n            for (name, value) in zip(self.model_inputs, numpy.squeeze(model.coef_)):\n                self.weights_dataframe.loc[model_name, name] = value\n            self._models_cache[model_name] = model\n\n    def get_model(self, name=None):\n        \"\"\"\n        Load or instantiate a new logistic regression model. Private helper\n        method.\n\n        Parameters\n        ----------\n        name : string\n            If None (the default), an un-fit LR model is returned. 
Otherwise the\n            weights are loaded for the specified model.\n\n        Returns\n        -------\n        sklearn.linear_model.LogisticRegression\n        \"\"\"\n        if name is None or name not in self._models_cache:\n            model = sklearn.linear_model.LogisticRegression(solver=\"lbfgs\")\n            if name is not None:\n                row = self.weights_dataframe.loc[name]\n                model.intercept_ = row.intercept\n                model.coef_ = numpy.expand_dims(\n                    row[self.model_inputs].values, axis=0)\n                model.classes_ = numpy.array([0, 1])\n        else:\n            model = self._models_cache[name]\n        return model\n\n    def predict(\n            self,\n            peptides,\n            alleles,\n            sample_names=None,\n            n_flanks=None,\n            c_flanks=None,\n            include_affinity_percentile=False,\n            verbose=1,\n            throw=True):\n        \"\"\"\n        Predict presentation scores across a set of peptides.\n\n        Presentation scores combine predictions for MHC I binding affinity\n        and antigen processing.\n\n        This method returns a pandas.DataFrame giving presentation scores plus\n        the binding affinity and processing predictions and other intermediate\n        results.\n\n        Example:\n\n        >>> predictor = Class1PresentationPredictor.load()\n        >>> predictor.predict(\n        ...    peptides=[\"SIINFEKL\", \"PEPTIDE\"],\n        ...    n_flanks=[\"NNN\", \"SNS\"],\n        ...    c_flanks=[\"CCC\", \"CNC\"],\n        ...    alleles={\n        ...        \"sample1\": [\"A0201\", \"A0301\", \"B0702\"],\n        ...        \"sample2\": [\"A0101\", \"C0202\"],\n        ...    },\n        ...    verbose=0)\n            peptide n_flank c_flank  peptide_num sample_name   affinity best_allele  processing_score  presentation_score  presentation_percentile\n        0  SIINFEKL     NNN     CCC            0     sample1  11927.161       A0201             0.838               0.145                    2.282\n        1   PEPTIDE     SNS     CNC            1     sample1  32507.082       A0201             0.025               0.003                  100.000\n        2  SIINFEKL     NNN     CCC            0     sample2   2725.593       C0202             0.838               0.416                    1.017\n        3   PEPTIDE     SNS     CNC            1     sample2  28304.338       C0202             0.025               0.003                   99.287\n\n        You can also specify sample_names, in which case each peptide is evaluated\n        for binding the alleles in the corresponding sample only. See\n        `predict_affinity` for an example.\n\n        Parameters\n        ----------\n        peptides : list of string\n            Peptide sequences\n        alleles : list of string or dict of string -> list of string\n            If you are predicting for a single sample, pass a list of strings\n            (up to 6) indicating the genotype. If you are predicting across\n            multiple samples, pass a dict where the keys are (arbitrary)\n            sample names and the values are the alleles to predict for that\n            sample. Set to an empty list or dict to perform processing\n            prediction only.\n        sample_names : list of string [same length as peptides]\n            If you are passing a dict for 'alleles', you can use this\n            argument to specify which peptides go with which samples. 
If it is\n            None, then predictions will be performed for each peptide across all\n            samples.\n        n_flanks : list of string [same length as peptides]\n            Upstream sequences before the peptide. Sequences of any length can\n            be given and a suffix of the size supported by the model will be\n            used.\n        c_flanks : list of string [same length as peptides]\n            Downstream sequences after the peptide. Sequences of any length can\n            be given and a prefix of the size supported by the model will be\n            used.\n        include_affinity_percentile : bool\n            Whether to include affinity percentile ranks\n        verbose : int\n            Set to 0 for quiet.\n        throw : boolean\n            Whether to throw exception (vs. just log a warning) on invalid\n            peptides, etc.\n\n        Returns\n        -------\n        pandas.DataFrame\n\n        Presentation scores and intermediate results.\n        \"\"\"\n\n        if isinstance(peptides, str):\n            raise TypeError(\"peptides must be a list not a string\")\n        if isinstance(alleles, str):\n            raise TypeError(\"alleles must be a list or dict\")\n\n        if not isinstance(alleles, dict):\n            # Make alleles into a dict.\n            if sample_names is not None:\n                raise ValueError(\n                    \"alleles must be a dict when sample_names is specified\")\n\n            alleles = numpy.asarray(alleles)\n            if len(alleles) > MAX_ALLELES_PER_SAMPLE:\n                raise ValueError(\n                    \"When alleles is a list, it must have at most %d elements. \"\n                    \"These alleles are taken to be a genotype for an \"\n                    \"individual, and the strongest prediction across alleles \"\n                    \"will be taken for each peptide. 
Note that this differs \"\n                    \"from Class1AffinityPredictor.predict(), where alleles \"\n                    \"is expected to be the same length as peptides.\"\n                    % MAX_ALLELES_PER_SAMPLE)\n\n            alleles = {\n                \"sample1\": alleles,\n            }\n\n        if (n_flanks is None) != (c_flanks is None):\n            raise ValueError(\"Specify both or neither of n_flanks, c_flanks\")\n\n        if self.supports_processing_prediction:\n            processing_scores = self.predict_processing(\n                peptides=peptides,\n                n_flanks=n_flanks,\n                c_flanks=c_flanks,\n                throw=throw,\n                verbose=verbose)\n        else:\n            processing_scores = None\n\n        if alleles:\n            df = self.predict_affinity(\n                peptides=peptides,\n                alleles=alleles,\n                sample_names=sample_names,  # might be None\n                include_affinity_percentile=include_affinity_percentile,\n                verbose=verbose,\n                throw=throw)\n\n            df[\"affinity_score\"] = from_ic50(df.affinity)\n        else:\n            # Processing prediction only.\n            df = pandas.DataFrame({\n                \"peptide_num\": numpy.arange(len(peptides)),\n                \"peptide\": peptides,\n            })\n            df[\"sample_name\"] = \"sample1\"\n\n        if processing_scores is not None:\n            df[\"processing_score\"] = df.peptide_num.map(\n                pandas.Series(processing_scores))\n            if c_flanks is not None:\n                df.insert(1, \"c_flank\", df.peptide_num.map(pandas.Series(c_flanks)))\n            if n_flanks is not None:\n                df.insert(1, \"n_flank\", df.peptide_num.map(pandas.Series(n_flanks)))\n\n        predict_presentation = (\n                \"affinity_score\" in df.columns and\n                \"processing_score\" in df.columns and\n                self.supports_presentation_prediction)\n        if predict_presentation:\n            if len(df) > 0:\n                model_name = 'with_flanks' if n_flanks is not None else \\\n                    \"without_flanks\"\n                model = self.get_model(model_name)\n                input_matrix = df[self.model_inputs]\n                null_mask = None\n                if not throw:\n                    # Invalid peptides will be null.\n                    null_mask = input_matrix.isnull().any(axis=1)\n                    input_matrix = input_matrix.fillna(0.0)\n                df[\"presentation_score\"] = model.predict_proba(\n                    input_matrix.values)[:,1]\n                if null_mask is not None:\n                    df.loc[null_mask, \"presentation_score\"] = numpy.nan\n                df[\"presentation_percentile\"] = self.percentile_ranks(\n                    df[\"presentation_score\"], throw=False)\n            else:\n                df[\"presentation_score\"] = []\n                df[\"presentation_percentile\"] = []\n            del df[\"affinity_score\"]\n        return df\n\n    def predict_sequences(\n            self,\n            sequences,\n            alleles,\n            result=\"best\",\n            comparison_quantity=None,\n            filter_value=None,\n            peptide_lengths=(8, 9, 10, 11),\n            use_flanks=True,\n            include_affinity_percentile=True,\n            verbose=1,\n            throw=True):\n        \"\"\"\n        Predict presentation across protein 
sequences.\n\n        Example:\n\n        >>> predictor = Class1PresentationPredictor.load()\n        >>> predictor.predict_sequences(\n        ...    sequences={\n        ...        'protein1': \"MDSKGSSQKGSRLLLLLVVSNLL\",\n        ...        'protein2': \"SSLPTPEDKEQAQQTHH\",\n        ...    },\n        ...    alleles={\n        ...        \"sample1\": [\"A0201\", \"A0301\", \"B0702\"],\n        ...        \"sample2\": [\"A0101\", \"C0202\"],\n        ...    },\n        ...    result=\"filtered\",\n        ...    comparison_quantity=\"affinity\",\n        ...    filter_value=500,\n        ...    verbose=0)\n          sequence_name  pos     peptide n_flank c_flank sample_name  affinity best_allele  affinity_percentile  processing_score  presentation_score  presentation_percentile\n        0      protein1   14   LLLVVSNLL   GSRLL             sample1    57.180       A0201                0.398             0.233               0.754                    0.351\n        1      protein1   13   LLLLVVSNL   KGSRL       L     sample1    57.339       A0201                0.398             0.031               0.586                    0.643\n        2      protein1    5   SSQKGSRLL   MDSKG   LLLVV     sample2   110.779       C0202                0.782             0.061               0.456                    0.920\n        3      protein1    6   SQKGSRLLL   DSKGS   LLVVS     sample2   254.480       C0202                1.735             0.102               0.303                    1.356\n        4      protein1   13  LLLLVVSNLL   KGSRL             sample1   260.390       A0201                1.012             0.158               0.345                    1.215\n        5      protein1   12  LLLLLVVSNL   QKGSR       L     sample1   308.150       A0201                1.094             0.015               0.206                    1.802\n        6      protein2    0   SSLPTPEDK           EQAQQ     sample2   410.354       C0202                2.398             0.003               0.158                    2.155\n        7      protein1    5    SSQKGSRL   MDSKG   LLLLV     sample2   444.321       C0202                2.512             0.026               0.159                    2.138\n        8      protein2    0   SSLPTPEDK           EQAQQ     sample1   459.296       A0301                0.971             0.003               0.144                    2.292\n        9      protein1    4   GSSQKGSRL    MDSK   LLLLV     sample2   469.052       C0202                2.595             0.014               0.146                    2.261\n\n        Parameters\n        ----------\n        sequences : str, list of string, or string -> string dict\n            Protein sequences. If a dict is given, the keys are arbitrary (\n            e.g. protein names), and the values are the amino acid sequences.\n        alleles : list of string, list of list of string, or dict of string -> list of string\n            MHC I alleles. 
Can be: (1) a string (a single allele), (2) a list of\n            strings (a single genotype), (3) a list of list of strings\n            (multiple genotypes, where the total number of genotypes must equal\n            the number of sequences), or (4) a dict giving multiple genotypes,\n            which will each be run over the sequences.\n        result : string\n            Specify 'best' to return the strongest peptide for each sequence,\n            'all' to return predictions for all peptides, or 'filtered' to\n            return predictions where the comparison_quantity is stronger\n            (i.e. (<) for affinity, (>) for scores) than filter_value.\n        comparison_quantity : string\n            One of \"presentation_score\", \"processing_score\", \"affinity\", or\n            \"affinity_percentile\". Prediction to use to rank (if result is\n            \"best\") or filter (if result is \"filtered\") results. Default is\n            \"presentation_score\".\n        filter_value : float\n            Threshold value to use, only relevant when result is \"filtered\".\n            If comparison_quantity is \"affinity\", then all results less than\n            (i.e. tighter than) the specified nM affinity are retained. If it's\n            \"presentation_score\" or \"processing_score\" then results greater than\n            the indicated filter_value are retained.\n        peptide_lengths : list of int\n            Peptide lengths to predict for.\n        use_flanks : bool\n            Whether to include flanking sequences when running the AP predictor\n            (for better cleavage prediction).\n        include_affinity_percentile : bool\n            Whether to include affinity percentile ranks in output.\n        verbose : int\n            Set to 0 for quiet mode.\n        throw : boolean\n            Whether to throw exceptions (vs. log warnings) on invalid inputs.\n\n        Returns\n        -------\n        pandas.DataFrame with columns:\n            sequence_name, pos, peptide, n_flank, c_flank, sample_name,\n            affinity, best_allele, processing_score, presentation_score,\n            presentation_percentile\n        \"\"\"\n        if len(alleles) == 0:\n            alleles = {}\n\n        if len(alleles) > 0 and not self.supports_affinity_prediction:\n            raise ValueError(\n                \"Affinity prediction not supported by this predictor\")\n\n        if comparison_quantity is None:\n            if len(alleles) > 0:\n                if self.supports_presentation_prediction:\n                    comparison_quantity = \"presentation_score\"\n                else:\n                    comparison_quantity = \"affinity\"\n            else:\n                comparison_quantity = \"processing_score\"\n\n        if comparison_quantity == \"presentation_score\":\n            if not self.supports_presentation_prediction:\n                raise ValueError(\n                    \"Presentation prediction not supported by this predictor\")\n        elif comparison_quantity == \"processing_score\":\n            if not self.supports_processing_prediction:\n                raise ValueError(\n                    \"Processing prediction not supported by this predictor\")\n        elif comparison_quantity in (\"affinity\", \"affinity_percentile\"):\n            if not self.supports_affinity_prediction:\n                raise ValueError(\n                    \"Affinity prediction not supported by this predictor\")\n        else:\n            raise ValueError(\n                \"Unknown comparison quantity: %s\" % comparison_quantity)\n\n        processing_predictor = self.processing_predictor_with_flanks\n        if not use_flanks or processing_predictor is None:\n            processing_predictor = self.processing_predictor_without_flanks\n\n        if processing_predictor is not None:\n            supported_sequence_lengths = processing_predictor.sequence_lengths\n            n_flank_length = supported_sequence_lengths[\"n_flank\"]\n            c_flank_length = supported_sequence_lengths[\"c_flank\"]\n        else:\n            n_flank_length = 0\n            c_flank_length = 0\n\n
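        # Enumerate candidate peptides of each requested length from every\n        # sequence, recording the upstream/downstream flanking context (up to\n        # the lengths supported by the processing predictor) for each one.\n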
        sequence_names = []\n        n_flanks = [] if use_flanks else None\n        c_flanks = [] if use_flanks else None\n        peptides = []\n\n        if isinstance(sequences, str):\n            sequences = [sequences]\n\n        if not isinstance(sequences, dict):\n            sequences = collections.OrderedDict(\n                (\"sequence_%04d\" % (i + 1), sequence)\n                for (i, sequence) in enumerate(sequences))\n\n        cross_product = True\n        if isinstance(alleles, str):\n            # Case (1) - alleles is a string\n            alleles = [alleles]\n\n        if isinstance(alleles, dict):\n            if any(isinstance(v, str) for v in alleles.values()):\n                raise ValueError(\n                    \"The values in the alleles dict must be lists, not strings\")\n        else:\n            if all(isinstance(a, str) for a in alleles):\n                # Case (2) - a simple list of alleles\n                alleles = {\n                    'sample1': alleles\n                }\n            else:\n                # Case (3) - a list of lists\n                alleles = collections.OrderedDict(\n                    (\"genotype_%04d\" % (i + 1), genotype)\n                    for (i, genotype) in enumerate(alleles))\n                cross_product = 
False\n\n                if len(alleles) != len(sequences):\n                    raise ValueError(\n                        \"When passing a list of lists for the alleles argument \"\n                        \"the length of the list (%d) must match the length of \"\n                        \"the sequences being predicted (%d)\" % (\n                            len(alleles), len(sequences)))\n\n        if not isinstance(alleles, dict):\n            raise ValueError(\"Invalid type for alleles: %s\" % type(alleles))\n\n        sample_names = None if cross_product else []\n        genotype_names = list(alleles)\n        position_in_sequence = []\n        for (i, (name, sequence)) in enumerate(sequences.items()):\n            genotype_name = None if cross_product else genotype_names[i]\n\n            if not isinstance(sequence, str):\n                raise ValueError(\"Expected string, not %s (%s)\" % (\n                    sequence, type(sequence)))\n            for peptide_start in range(len(sequence) - min(peptide_lengths) + 1):\n                n_flank_start = max(0, peptide_start - n_flank_length)\n                for peptide_length in peptide_lengths:\n                    peptide = sequence[\n                        peptide_start: peptide_start + peptide_length\n                    ]\n                    if len(peptide) != peptide_length:\n                        continue\n                    c_flank_end = (\n                        peptide_start + peptide_length + c_flank_length)\n                    sequence_names.append(name)\n                    position_in_sequence.append(peptide_start)\n                    if not cross_product:\n                        sample_names.append(genotype_name)\n                    peptides.append(peptide)\n                    if use_flanks:\n                        n_flanks.append(\n                            sequence[n_flank_start : peptide_start])\n                        c_flanks.append(\n                            sequence[peptide_start + peptide_length : c_flank_end])\n\n        result_df = self.predict(\n            peptides=peptides,\n            alleles=alleles,\n            n_flanks=n_flanks,\n            c_flanks=c_flanks,\n            sample_names=sample_names,\n            include_affinity_percentile=include_affinity_percentile,\n            verbose=verbose,\n            throw=throw)\n\n        result_df.insert(\n            0,\n            \"sequence_name\",\n            result_df.peptide_num.map(pandas.Series(sequence_names)))\n        result_df.insert(\n            1,\n            \"pos\",\n            result_df.peptide_num.map(pandas.Series(position_in_sequence)))\n        del result_df[\"peptide_num\"]\n\n        comparison_is_score = comparison_quantity.endswith(\"score\")\n\n
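        # For score quantities (presentation_score, processing_score) higher\n        # is stronger, so sort descending; for affinity (nM) and\n        # affinity_percentile lower is stronger, so sort ascending.\n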
        result_df = result_df.sort_values(\n            comparison_quantity,\n            ascending=not comparison_is_score)\n\n        if result == \"best\":\n            result_df = result_df.drop_duplicates(\n                [\"sequence_name\", \"sample_name\"], keep=\"first\"\n            ).sort_values(\"sequence_name\")\n        elif result == \"filtered\":\n            if comparison_is_score:\n                result_df = result_df.loc[\n                    result_df[comparison_quantity] >= filter_value\n                ]\n            else:\n                result_df = result_df.loc[\n                    result_df[comparison_quantity] <= filter_value\n                ]\n        elif result == \"all\":\n            pass\n        else:\n            raise ValueError(\n                \"Unknown result: %s. Valid choices are: best, filtered, all\"\n                % result)\n\n        result_df = result_df.reset_index(drop=True)\n        result_df = result_df.copy()\n\n        return result_df\n\n    def save(\n            self,\n            models_dir,\n            write_affinity_predictor=True,\n            write_processing_predictor=True,\n            write_weights=True,\n            write_percent_ranks=True,\n            write_info=True,\n            write_metadata=True):\n        \"\"\"\n        Save the predictor to a directory on disk. If the directory does\n        not exist it will be created.\n\n        The wrapped Class1AffinityPredictor and Class1ProcessingPredictor\n        instances are included in the saved data.\n\n        Parameters\n        ----------\n        models_dir : string\n            Path to directory. It will be created if it doesn't exist.\n        \"\"\"\n\n        if write_weights and self.weights_dataframe is None:\n            raise RuntimeError(\"Can't save before fitting\")\n\n        if not exists(models_dir):\n            mkdir(models_dir)\n\n        # Save underlying predictors\n        if write_affinity_predictor:\n            self.affinity_predictor.save(join(models_dir, \"affinity_predictor\"))\n        if write_processing_predictor:\n            if self.processing_predictor_with_flanks is not None:\n                self.processing_predictor_with_flanks.save(\n                    join(models_dir, \"processing_predictor_with_flanks\"))\n            if self.processing_predictor_without_flanks is not None:\n                self.processing_predictor_without_flanks.save(\n                    join(models_dir, \"processing_predictor_without_flanks\"))\n\n        if write_weights:\n            # Save model coefficients.\n            self.weights_dataframe.to_csv(join(models_dir, \"weights.csv\"))\n\n        if write_percent_ranks:\n            # Percent ranks\n            if self.percent_rank_transform:\n                series = self.percent_rank_transform.to_series()\n                percent_ranks_df = pandas.DataFrame(index=series.index)\n                numpy.testing.assert_array_almost_equal(\n                    series.index.values,\n                    percent_ranks_df.index.values)\n                percent_ranks_df[\"presentation_score\"] = series.values\n                percent_ranks_path = join(models_dir, \"percent_ranks.csv\")\n                percent_ranks_df.to_csv(\n                    percent_ranks_path,\n                    index=True,\n                    index_label=\"bin\")\n                logging.info(\"Wrote: %s\", percent_ranks_path)\n\n        if write_info:\n            # Write \"info.txt\"\n            info_path = join(models_dir, \"info.txt\")\n            rows = [\n                (\"trained on\", time.asctime()),\n                (\"package   \", \"mhcflurry %s\" % __version__),\n                (\"hostname  \", gethostname()),\n                (\"user      \", getuser()),\n            ]\n            pandas.DataFrame(rows).to_csv(\n                info_path, sep=\"\\t\", header=False, index=False)\n\n        if write_metadata:\n            if self.metadata_dataframes:\n                for (name, df) in self.metadata_dataframes.items():\n                    metadata_df_path = join(models_dir, \"%s.csv.bz2\" % name)\n                    df.to_csv(metadata_df_path, index=False, compression=\"bz2\")\n\n\n    @classmethod\n    def load(cls, models_dir=None, max_models=None):\n        
\"\"\"\n        Deserialize a predictor from a directory on disk.\n\n        This will also load the wrapped Class1AffinityPredictor and\n        Class1ProcessingPredictor instances.\n\n        Parameters\n        ----------\n        models_dir : string\n            Path to directory. If unspecified the default downloaded models are\n            used.\n\n        max_models : int, optional\n            Maximum number of affinity and processing (counted separately)\n            models to load\n\n        Returns\n        -------\n        `Class1PresentationPredictor` instance\n        \"\"\"\n        if models_dir is None:\n            models_dir = get_default_class1_presentation_models_dir()\n\n        affinity_predictor = Class1AffinityPredictor.load(\n            join(models_dir, \"affinity_predictor\"), max_models=max_models)\n\n        processing_predictor_with_flanks = None\n        if exists(join(models_dir, \"processing_predictor_with_flanks\")):\n            processing_predictor_with_flanks = Class1ProcessingPredictor.load(\n                join(models_dir, \"processing_predictor_with_flanks\"),\n                max_models=max_models)\n        else:\n            logging.warning(\n                \"Presentation predictor is missing processing predictor: %s\",\n                join(models_dir, \"processing_predictor_with_flanks\"))\n\n        processing_predictor_without_flanks = None\n        if exists(join(models_dir, \"processing_predictor_without_flanks\")):\n            processing_predictor_without_flanks = Class1ProcessingPredictor.load(\n                join(models_dir, \"processing_predictor_without_flanks\"),\n                max_models=max_models)\n        else:\n            logging.warning(\n                \"Presentation predictor is missing processing predictor: %s\",\n                join(models_dir, \"processing_predictor_without_flanks\"))\n\n        weights_dataframe = pandas.read_csv(\n            join(models_dir, \"weights.csv\"),\n            index_col=0)\n\n        # Load percent ranks if available\n        percent_rank_transform = None\n        percent_ranks_path = join(models_dir, \"percent_ranks.csv\")\n        if exists(percent_ranks_path):\n            percent_ranks_df = pandas.read_csv(percent_ranks_path, index_col=0)\n            percent_rank_transform = PercentRankTransform.from_series(\n                percent_ranks_df[\"presentation_score\"])\n\n        provenance_string = None\n        try:\n            info_path = join(models_dir, \"info.txt\")\n            info = pandas.read_csv(\n                info_path, sep=\"\\t\", header=None, index_col=0).iloc[\n                :, 0\n            ].to_dict()\n            provenance_string = \"generated on %s\" % info[\"trained on\"]\n        except OSError:\n            pass\n\n        result = cls(\n            affinity_predictor=affinity_predictor,\n            processing_predictor_with_flanks=processing_predictor_with_flanks,\n            processing_predictor_without_flanks=processing_predictor_without_flanks,\n            weights_dataframe=weights_dataframe,\n            percent_rank_transform=percent_rank_transform,\n            provenance_string=provenance_string)\n        return result\n\n    def __repr__(self):\n        pieces = [\"at 0x%0x\" % id(self), \"[mhcflurry %s]\" % __version__]\n        if self.provenance_string:\n            pieces.append(self.provenance_string)\n        return \"<Class1PresentationPredictor %s>\" % \" \".join(pieces)\n\n    def percentile_ranks(self, presentation_scores, 
throw=True):\n        \"\"\"\n        Return percentile ranks for the given presentation scores.\n\n        Parameters\n        ----------\n        presentation_scores : sequence of float\n            Presentation prediction scores\n        throw : boolean\n            Whether to raise an exception (vs. warn and return NaNs) when\n            percentile rank information is unavailable\n\n        Returns\n        -------\n        numpy.array of float\n        \"\"\"\n\n        if self.percent_rank_transform is None:\n            msg = \"No presentation predictor percentile rank information\"\n            if throw:\n                raise ValueError(msg)\n            warnings.warn(msg)\n            return numpy.ones(len(presentation_scores)) * numpy.nan\n\n        # We subtract from 100 so that strong binders have low percentile ranks,\n        # making them comparable to affinity percentile ranks.\n        return 100 - self.percent_rank_transform.transform(presentation_scores)\n\n    def calibrate_percentile_ranks(self, scores, bins=None):\n        \"\"\"\n        Compute the cumulative distribution of scores, to enable taking\n        quantiles of this distribution later.\n\n        Parameters\n        ----------\n        scores : sequence of float\n            Presentation prediction scores\n        bins : object\n            Anything that can be passed to numpy.histogram's \"bins\" argument\n            can be used here, i.e. either an integer or a sequence giving bin\n            edges.\n        \"\"\"\n        if bins is None:\n            bins = numpy.linspace(0, 1, 1000)\n\n        self.percent_rank_transform = PercentRankTransform()\n        self.percent_rank_transform.fit(scores, bins=bins)\n"
  },
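  {
    "path": "examples/presentation_prediction_sketch.py",
    "content": "\"\"\"\nIllustrative usage sketch for the Class1PresentationPredictor API defined in\nmhcflurry/class1_presentation_predictor.py. The sequences, alleles, and\nfilter settings below are taken from the predict_sequences docstring example;\nthe script assumes trained models have already been fetched with:\n\n    mhcflurry-downloads fetch models_class1_presentation\n\"\"\"\nfrom mhcflurry import Class1PresentationPredictor\n\n\ndef main():\n    predictor = Class1PresentationPredictor.load()\n\n    # Scan two toy protein sequences across two samples, keeping peptides\n    # with predicted binding affinity tighter (lower) than 500 nM.\n    results = predictor.predict_sequences(\n        sequences={\n            \"protein1\": \"MDSKGSSQKGSRLLLLLVVSNLL\",\n            \"protein2\": \"SSLPTPEDKEQAQQTHH\",\n        },\n        alleles={\n            \"sample1\": [\"A0201\", \"A0301\", \"B0702\"],\n            \"sample2\": [\"A0101\", \"C0202\"],\n        },\n        result=\"filtered\",\n        comparison_quantity=\"affinity\",\n        filter_value=500,\n        verbose=0)\n    print(results)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },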
  {
    "path": "mhcflurry/class1_processing_neural_network.py",
    "content": "\"\"\"\nAntigen processing neural network implementation - PyTorch version\n\"\"\"\n\nimport time\nimport collections\nimport gc\nimport json\nimport numpy\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .hyperparameters import HyperparameterDefaults\nfrom .class1_neural_network import DEFAULT_PREDICT_BATCH_SIZE\nfrom .flanking_encoding import FlankingEncoding\nfrom .common import get_pytorch_device\n\n\nclass Class1ProcessingModel(nn.Module):\n    \"\"\"\n    PyTorch module for antigen processing prediction.\n    \"\"\"\n\n    def __init__(\n            self,\n            sequence_dims,\n            n_flank_length,\n            c_flank_length,\n            peptide_max_length,\n            flanking_averages,\n            convolutional_filters,\n            convolutional_kernel_size,\n            convolutional_activation,\n            convolutional_kernel_l1_l2,\n            dropout_rate,\n            post_convolutional_dense_layer_sizes):\n        super(Class1ProcessingModel, self).__init__()\n\n        self.n_flank_length = n_flank_length\n        self.c_flank_length = c_flank_length\n        self.peptide_max_length = peptide_max_length\n        self.flanking_averages = flanking_averages\n\n        # Input channels from sequence encoding\n        in_channels = sequence_dims[1]\n\n        # Main convolutional layer\n        self.conv1 = nn.Conv1d(\n            in_channels=in_channels,\n            out_channels=convolutional_filters,\n            kernel_size=convolutional_kernel_size,\n            padding='same'\n        )\n\n        # Activation function\n        if convolutional_activation == 'tanh':\n            self.conv_activation = torch.tanh\n        elif convolutional_activation == 'relu':\n            self.conv_activation = F.relu\n        elif convolutional_activation == 'sigmoid':\n            self.conv_activation = torch.sigmoid\n        else:\n            self.conv_activation = torch.tanh\n\n        # Dropout\n        self.dropout_rate = dropout_rate\n        if dropout_rate > 0:\n            self.dropout = nn.Dropout1d(p=dropout_rate)\n        else:\n            self.dropout = None\n\n        # Post-convolutional dense layers for each flank\n        # These are implemented as 1D convolutions with kernel_size=1\n        layer_sizes = list(post_convolutional_dense_layer_sizes) + [1]\n\n        self.n_flank_post_convs = nn.ModuleList()\n        self.c_flank_post_convs = nn.ModuleList()\n\n        current_channels = convolutional_filters\n        for i, size in enumerate(layer_sizes):\n            self.n_flank_post_convs.append(nn.Conv1d(\n                in_channels=current_channels,\n                out_channels=size,\n                kernel_size=1\n            ))\n            self.c_flank_post_convs.append(nn.Conv1d(\n                in_channels=current_channels,\n                out_channels=size,\n                kernel_size=1\n            ))\n            current_channels = size\n\n        # Dense layers for flanking averages (if enabled)\n        self.n_flank_avg_dense = None\n        self.c_flank_avg_dense = None\n        if flanking_averages:\n            if n_flank_length > 0:\n                self.n_flank_avg_dense = nn.Linear(convolutional_filters, 1)\n            if c_flank_length > 0:\n                self.c_flank_avg_dense = nn.Linear(convolutional_filters, 1)\n\n        # Final output layer\n        # Number of inputs: 2 from n_flank (cleaved + max_pool) + 2 from c_flank\n        # Plus optional flanking averages\n       
 num_final_inputs = 4\n        if flanking_averages and n_flank_length > 0:\n            num_final_inputs += 1\n        if flanking_averages and c_flank_length > 0:\n            num_final_inputs += 1\n\n        self.output_layer = nn.Linear(num_final_inputs, 1)\n        # Initialize output weights to ones (like Keras initializers.Ones())\n        nn.init.ones_(self.output_layer.weight)\n        nn.init.zeros_(self.output_layer.bias)\n\n    def forward(self, inputs):\n        \"\"\"\n        Forward pass.\n\n        Parameters\n        ----------\n        inputs : dict\n            Dictionary with 'sequence' and 'peptide_length' keys\n\n        Returns\n        -------\n        torch.Tensor\n            Predictions of shape (batch,)\n        \"\"\"\n        sequence = inputs['sequence']  # (batch, seq_len, channels)\n        peptide_length = inputs['peptide_length']  # (batch, 1)\n\n        # Transpose for Conv1d: (batch, channels, seq_len)\n        x = sequence.permute(0, 2, 1)\n\n        # Apply main convolution\n        x = self.conv1(x)\n        x = self.conv_activation(x)\n\n        if self.dropout is not None:\n            # Spatial dropout: same dropout mask for all positions\n            # Equivalent to Keras Dropout with noise_shape=(None, 1, channels)\n            x = self.dropout(x)\n\n        # Transpose back: (batch, seq_len, channels)\n        convolutional_result = x.permute(0, 2, 1)\n\n        outputs_for_final = []\n\n        # Process n_flank\n        n_flank_outputs = self._process_n_flank(\n            convolutional_result, peptide_length\n        )\n        outputs_for_final.extend(n_flank_outputs)\n\n        # Process c_flank\n        c_flank_outputs = self._process_c_flank(\n            convolutional_result, peptide_length\n        )\n        outputs_for_final.extend(c_flank_outputs)\n\n        # Concatenate all outputs\n        combined = torch.cat(outputs_for_final, dim=-1)\n\n        # Final output\n        output = torch.sigmoid(self.output_layer(combined))\n        return output.squeeze(-1)\n\n    def _process_n_flank(self, conv_result, peptide_length):\n        \"\"\"Process n_flank feature extraction.\"\"\"\n        outputs = []\n\n        # Apply post-convolutional layers\n        # Transpose for Conv1d\n        x = conv_result.permute(0, 2, 1)\n        for i, conv_layer in enumerate(self.n_flank_post_convs):\n            x = conv_layer(x)\n            if i < len(self.n_flank_post_convs) - 1:\n                x = self.conv_activation(x)\n            else:\n                x = torch.tanh(x)  # Final layer always tanh\n        # Transpose back\n        single_output_result = x.permute(0, 2, 1)  # (batch, seq_len, 1)\n\n        # Extract at cleavage position (n_flank_length)\n        cleaved = single_output_result[:, self.n_flank_length, :]  # (batch, 1)\n        outputs.append(cleaved)\n\n        # Max pool over peptide (excluding first position)\n        max_pool = self._max_pool_over_peptide_n(\n            single_output_result, peptide_length\n        )\n        outputs.append(max_pool)\n\n        # Optional flanking average\n        if self.n_flank_avg_dense is not None and self.n_flank_length > 0:\n            avg = self._extract_n_flank_avg(conv_result)\n            dense_out = torch.tanh(self.n_flank_avg_dense(avg))  # (batch, 1)\n            outputs.append(dense_out)\n\n        return outputs\n\n    def _process_c_flank(self, conv_result, peptide_length):\n        \"\"\"Process c_flank feature extraction.\"\"\"\n        outputs = []\n\n        # Apply 
post-convolutional layers\n        x = conv_result.permute(0, 2, 1)\n        for i, conv_layer in enumerate(self.c_flank_post_convs):\n            x = conv_layer(x)\n            if i < len(self.c_flank_post_convs) - 1:\n                x = self.conv_activation(x)\n            else:\n                x = torch.tanh(x)\n        single_output_result = x.permute(0, 2, 1)  # (batch, seq_len, 1)\n\n        # Extract at cleavage position (dynamic based on peptide_length)\n        cleaved = self._extract_c_cleavage(single_output_result, peptide_length)\n        outputs.append(cleaved)\n\n        # Max pool over peptide (excluding last position)\n        max_pool = self._max_pool_over_peptide_c(\n            single_output_result, peptide_length\n        )\n        outputs.append(max_pool)\n\n        # Optional flanking average\n        if self.c_flank_avg_dense is not None and self.c_flank_length > 0:\n            # Average over c_flank region (dynamic based on peptide_length)\n            avg = self._extract_c_flank_avg(conv_result, peptide_length)\n            dense_out = torch.tanh(self.c_flank_avg_dense(avg))\n            outputs.append(dense_out)\n\n        return outputs\n\n    def _max_pool_over_peptide_n(self, x, peptide_length):\n        \"\"\"\n        Max pool over peptide region excluding first position.\n        For n_flank cleavage site.\n        \"\"\"\n        batch_size, seq_len, features = x.shape\n        peptide_length = peptide_length.view(-1)\n\n        # Create position indices\n        positions = torch.arange(seq_len, device=x.device).unsqueeze(0)\n\n        # Mask selects peptide positions excluding the first residue, i.e. the\n        # half-open range [n_flank_length + 1, n_flank_length + peptide_length)\n        starts = self.n_flank_length + 1\n        ends = (self.n_flank_length + peptide_length).unsqueeze(1)\n        mask = (positions >= starts) & (positions < ends)  # (batch, seq_len)\n\n        # Apply mask (assuming x >= -1 from tanh)\n        x_shifted = x + 1\n        mask_expanded = mask.unsqueeze(-1).float()\n        masked_x = x_shifted * mask_expanded\n        max_value = masked_x.max(dim=1)[0] - 1  # (batch, features)\n\n        # Flip sign\n        return -1 * max_value\n\n    def _max_pool_over_peptide_c(self, x, peptide_length):\n        \"\"\"\n        Max pool over peptide region excluding last position.\n        For c_flank cleavage site.\n        \"\"\"\n        batch_size, seq_len, features = x.shape\n        peptide_length = peptide_length.view(-1)\n\n        positions = torch.arange(seq_len, device=x.device).unsqueeze(0)\n\n        # Mask selects peptide positions excluding the last residue, i.e. the\n        # half-open range [n_flank_length, n_flank_length + peptide_length - 1)\n        starts = self.n_flank_length\n        ends = (self.n_flank_length + peptide_length - 1).unsqueeze(1)\n        mask = (positions >= starts) & (positions < ends)\n\n        x_shifted = x + 1\n        mask_expanded = mask.unsqueeze(-1).float()\n        masked_x = x_shifted * mask_expanded\n        max_value = masked_x.max(dim=1)[0] - 1\n\n        return -1 * max_value\n\n    def _extract_c_cleavage(self, x, peptide_length):\n        \"\"\"Extract at c-terminal cleavage position.\"\"\"\n        peptide_length = peptide_length.view(-1)\n        indices = self.n_flank_length + peptide_length - 1\n\n        batch_size = x.size(0)\n        indices = indices.long().view(batch_size, 1, 1).expand(-1, -1, x.size(2))\n        result = x.gather(1, indices).squeeze(1)  # (batch, features)\n        return result\n\n    def _extract_c_flank_avg(self, conv_result, peptide_length):\n        \"\"\"\n        Average over c-flank region using TF-compatible masking semantics.\n\n        In TF/Keras this is implemented as:\n            reduce_mean((x + 1) * mask, axis=1) - 1\n        which averages across the full sequence axis (not only masked positions).\n
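\n        For example, with mask = [0, 0, 1, 1] over a length-4 sequence axis\n        and x = [0.0, 0.0, 0.5, 0.7] in one channel, the expression gives\n        mean([0, 0, 1.5, 1.7]) - 1 = -0.2 rather than mean([0.5, 0.7]) = 0.6;\n        the code below reproduces exactly that computation.\n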
        \"\"\"\n        batch_size, seq_len, features = conv_result.shape\n        peptide_length = peptide_length.view(-1)\n\n        positions = torch.arange(seq_len, device=conv_result.device).unsqueeze(0)\n\n        # Mask: 1 for c_flank positions\n        starts = (self.n_flank_length + peptide_length).unsqueeze(1)\n        ends = starts + self.c_flank_length\n        mask = (positions >= starts) & (positions < ends)\n\n        x_shifted = conv_result + 1\n        mask_expanded = mask.unsqueeze(-1).float()\n        avg_value = (x_shifted * mask_expanded).mean(dim=1) - 1\n\n        return avg_value\n\n    def _extract_n_flank_avg(self, conv_result):\n        \"\"\"\n        Average over n-flank region using TF-compatible masking semantics.\n\n        In TF/Keras this is implemented as:\n            reduce_mean((x + 1) * mask, axis=1) - 1\n        where mask selects n-flank positions.\n        \"\"\"\n        _, seq_len, _ = conv_result.shape\n\n        positions = torch.arange(seq_len, device=conv_result.device).unsqueeze(0)\n        mask = (positions >= 0) & (positions < self.n_flank_length)\n        mask_expanded = mask.unsqueeze(-1).float()\n\n        x_shifted = conv_result + 1\n        avg_value = (x_shifted * mask_expanded).mean(dim=1) - 1\n        return avg_value\n\n    def get_weights_list(self):\n        \"\"\"Get weights as a list of numpy arrays.\"\"\"\n        weights = []\n        for _, param in self.named_parameters():\n            weights.append(param.detach().cpu().numpy())\n        for _, buffer in self.named_buffers():\n            weights.append(buffer.detach().cpu().numpy())\n        return weights\n\n    def set_weights_list(self, weights, auto_convert_keras=True):\n        \"\"\"\n        Set weights from a list of numpy arrays.\n\n        Supports automatic detection and conversion of Keras-format weights.\n\n        Parameters\n        ----------\n        weights : list of numpy.ndarray\n        auto_convert_keras : bool\n            If True, automatically detect and convert Keras-format weights\n        \"\"\"\n        # Keras stores weights in layer definition order which interleaves\n        # n_flank and c_flank post-conv layers:\n        #   conv1, n_post_0, c_post_0, n_post_1, c_post_1, n_avg, c_avg, output\n        # PyTorch ModuleList stores:\n        #   conv1, n_post_0, n_post_1, c_post_0, c_post_1, n_avg, c_avg, output\n        # We need to reorder Keras weights to match PyTorch parameter order\n\n        if auto_convert_keras:\n            weights = self._reorder_keras_weights(list(weights))\n\n        idx = 0\n        for name, param in self.named_parameters():\n            w = weights[idx].astype(numpy.float32)\n\n            # Auto-detect and convert Keras weights if shapes don't match\n            if auto_convert_keras and w.shape != param.shape:\n                # Dense/Linear: Keras (in, out) -> PyTorch (out, in)\n                if len(w.shape) == 2 and w.shape == param.shape[::-1]:\n                    w = w.T\n                # Conv1D: Keras (k, in_ch, out_ch) -> PyTorch (out_ch, in_ch, k)\n                elif len(w.shape) == 3 and w.shape == (param.shape[2], param.shape[1], param.shape[0]):\n                    w = w.transpose(2, 1, 0)\n\n            if w.shape != param.shape:\n                
raise ValueError(\n                    f\"Weight shape mismatch for {name}: \"\n                    f\"got {weights[idx].shape}, expected {param.shape}\"\n                )\n\n            param.data = torch.from_numpy(w).to(\n                device=param.device,\n                dtype=param.dtype,\n            )\n            idx += 1\n        for name, buffer in self.named_buffers():\n            tensor = torch.from_numpy(weights[idx]).to(\n                device=buffer.device,\n                dtype=buffer.dtype,\n            )\n            self._buffers[name] = tensor\n            idx += 1\n\n    def _reorder_keras_weights(self, weights):\n        \"\"\"\n        Reorder Keras weights to match PyTorch parameter order.\n\n        Keras interleaves n_flank and c_flank post-conv layers:\n            conv1, n_post_0, c_post_0, n_post_1, c_post_1, ..., n_avg, c_avg, output\n        PyTorch has:\n            conv1, n_post_0, n_post_1, ..., c_post_0, c_post_1, ..., n_avg, c_avg, output\n\n        Returns\n        -------\n        list of numpy.ndarray\n        \"\"\"\n        # Count how many post-conv layers there are (each has weight + bias)\n        n_post_conv_layers = len(self.n_flank_post_convs)\n        if n_post_conv_layers == 0:\n            return weights\n\n        # Find indices in Keras weight list\n        # Structure: conv1_w, conv1_b, [n_post_i_w, n_post_i_b, c_post_i_w, c_post_i_b]...,\n        #            n_avg_w, n_avg_b, c_avg_w, c_avg_b, out_w, out_b\n\n        reordered = []\n\n        # Conv1 weights (indices 0, 1)\n        reordered.append(weights[0])\n        reordered.append(weights[1])\n\n        # Keras has interleaved: n_post_0, c_post_0, n_post_1, c_post_1, ...\n        # We need: n_post_0, n_post_1, ..., c_post_0, c_post_1, ...\n        post_conv_start = 2\n        post_conv_end = post_conv_start + n_post_conv_layers * 4  # 4 = n_w, n_b, c_w, c_b per layer\n\n        # Extract n_flank and c_flank post-conv weights separately\n        n_flank_weights = []\n        c_flank_weights = []\n        for i in range(n_post_conv_layers):\n            keras_idx = post_conv_start + i * 4\n            n_flank_weights.append(weights[keras_idx])      # n_post_i_w\n            n_flank_weights.append(weights[keras_idx + 1])  # n_post_i_b\n            c_flank_weights.append(weights[keras_idx + 2])  # c_post_i_w\n            c_flank_weights.append(weights[keras_idx + 3])  # c_post_i_b\n\n        # Add in PyTorch order: all n_flank first, then all c_flank\n        reordered.extend(n_flank_weights)\n        reordered.extend(c_flank_weights)\n\n        # Remaining weights (avg dense and output) stay in same order\n        reordered.extend(weights[post_conv_end:])\n\n        return reordered\n\n\nclass Class1ProcessingNeuralNetwork(object):\n    \"\"\"\n    A neural network for antigen processing prediction\n    \"\"\"\n\n    network_hyperparameter_defaults = HyperparameterDefaults(\n        amino_acid_encoding=\"BLOSUM62\",\n        peptide_max_length=15,\n        n_flank_length=10,\n        c_flank_length=10,\n        flanking_averages=False,\n        convolutional_filters=16,\n        convolutional_kernel_size=8,\n        convolutional_activation=\"tanh\",\n        convolutional_kernel_l1_l2=[0.0001, 0.0001],\n        dropout_rate=0.5,\n        post_convolutional_dense_layer_sizes=[],\n    )\n    \"\"\"\n    Hyperparameters (and their default values) that affect the neural network\n    architecture.\n    \"\"\"\n\n    fit_hyperparameter_defaults = HyperparameterDefaults(\n        
max_epochs=500,\n        validation_split=0.1,\n        early_stopping=True,\n        minibatch_size=256,\n    )\n    \"\"\"\n    Hyperparameters for neural network training.\n    \"\"\"\n\n    early_stopping_hyperparameter_defaults = HyperparameterDefaults(\n        patience=30,\n        min_delta=0.0,\n    )\n    \"\"\"\n    Hyperparameters for early stopping.\n    \"\"\"\n\n    compile_hyperparameter_defaults = HyperparameterDefaults(\n        optimizer=\"adam\",\n        learning_rate=None,\n    )\n    \"\"\"\n    Loss and optimizer hyperparameters.\n    \"\"\"\n\n    auxiliary_input_hyperparameter_defaults = HyperparameterDefaults()\n    \"\"\"\n    Allele feature hyperparameters.\n    \"\"\"\n\n    hyperparameter_defaults = (\n        network_hyperparameter_defaults.extend(fit_hyperparameter_defaults)\n        .extend(early_stopping_hyperparameter_defaults)\n        .extend(compile_hyperparameter_defaults)\n        .extend(auxiliary_input_hyperparameter_defaults)\n    )\n\n    def __init__(self, **hyperparameters):\n        self.hyperparameters = self.hyperparameter_defaults.with_defaults(\n            hyperparameters\n        )\n        self._network = None\n        self.network_json = None\n        self.network_weights = None\n        self.fit_info = []\n\n    @property\n    def sequence_lengths(self):\n        \"\"\"\n        Supported maximum sequence lengths\n\n        Returns\n        -------\n        dict of string -> int\n\n        Keys are \"peptide\", \"n_flank\", \"c_flank\". Values give the maximum\n        supported sequence length.\n        \"\"\"\n        return {\n            \"peptide\": self.hyperparameters[\"peptide_max_length\"],\n            \"n_flank\": self.hyperparameters[\"n_flank_length\"],\n            \"c_flank\": self.hyperparameters[\"c_flank_length\"],\n        }\n\n    def get_device(self):\n        \"\"\"Get the PyTorch device to use.\"\"\"\n        return get_pytorch_device()\n\n    def network(self):\n        \"\"\"\n        Return the PyTorch model associated with this network.\n        \"\"\"\n        if self._network is None and self.network_json is not None:\n            # Re-create the network using hyperparameters\n            self._network = self.make_network(\n                **self.network_hyperparameter_defaults.subselect(self.hyperparameters)\n            )\n            if self.network_weights is not None:\n                # Detect if weights are from Keras or PyTorch format\n                # Keras JSON has 'class_name': 'Model', PyTorch has 'hyperparameters'\n                try:\n                    config = json.loads(self.network_json)\n                    is_keras_format = config.get('class_name') in ('Model', 'Functional')\n                except (json.JSONDecodeError, TypeError):\n                    is_keras_format = False\n                self._network.set_weights_list(\n                    self.network_weights,\n                    auto_convert_keras=is_keras_format\n                )\n        return self._network\n\n    @staticmethod\n    def _regularized_parameters(network):\n        \"\"\"\n        Parameters subject to master-branch convolution kernel regularization.\n        \"\"\"\n        for name, param in network.named_parameters():\n            if not param.requires_grad or not name.endswith(\"weight\"):\n                continue\n            if (\n                    name == \"conv1.weight\" or\n                    \"n_flank_post_convs\" in name or\n                    \"c_flank_post_convs\" in name):\n               
 yield param\n\n    @staticmethod\n    def _regularization_penalty(parameters, l1=0.0, l2=0.0):\n        \"\"\"\n        Match Keras kernel_regularizer semantics used on convolution kernels.\n        \"\"\"\n        parameters = tuple(parameters)\n        if not parameters or (not l1 and not l2):\n            return None\n        penalty = torch.zeros((), device=parameters[0].device)\n        for param in parameters:\n            if l1:\n                penalty = penalty + (l1 * param.abs().sum())\n            if l2:\n                penalty = penalty + (l2 * param.square().sum())\n        return penalty\n\n    def update_network_description(self):\n        \"\"\"\n        Update self.network_json and self.network_weights properties based on\n        this instance's neural network.\n        \"\"\"\n        if self._network is not None:\n            # Store hyperparameters as JSON (not the actual model structure)\n            self.network_json = json.dumps({'hyperparameters': dict(self.hyperparameters)})\n            self.network_weights = self._network.get_weights_list()\n\n    def fit(\n        self,\n        sequences,\n        targets,\n        sample_weights=None,\n        shuffle_permutation=None,\n        verbose=1,\n        progress_callback=None,\n        progress_preamble=\"\",\n        progress_print_interval=5.0,\n    ):\n        \"\"\"\n        Fit the neural network.\n\n        Parameters\n        ----------\n        sequences : FlankingEncoding\n            Peptides and upstream/downstream flanking sequences\n        targets : list of float\n            1 indicates hit, 0 indicates decoy\n        sample_weights : list of float\n            If not specified all samples have equal weight.\n        shuffle_permutation : list of int\n            Permutation (integer list) of the same length as targets.\n            If None, then a random permutation will be generated.\n        verbose : int\n            Verbosity level\n        progress_callback : function\n            No-argument function to call after each epoch.\n        progress_preamble : string\n            Optional string of information to include in each progress update\n        progress_print_interval : float\n            How often (in seconds) to print progress update. 
Set to None to\n            disable.\n        \"\"\"\n        device = self.get_device()\n\n        x_dict = self.network_input(sequences)\n\n        # Shuffle\n        if shuffle_permutation is None:\n            shuffle_permutation = numpy.random.permutation(len(targets))\n        targets = numpy.array(targets)[shuffle_permutation]\n        assert numpy.isnan(targets).sum() == 0, targets\n        if sample_weights is not None:\n            sample_weights = numpy.array(sample_weights)[shuffle_permutation]\n        for key in list(x_dict):\n            x_dict[key] = x_dict[key][shuffle_permutation]\n\n        fit_info = collections.defaultdict(list)\n\n        if self._network is None:\n            self._network = self.make_network(\n                **self.network_hyperparameter_defaults.subselect(self.hyperparameters)\n            )\n            if verbose > -1:\n                print(self._network)\n\n        network = self.network()\n        network.to(device)\n\n        # Setup optimizer\n        optimizer = self._create_optimizer(network)\n\n        # Loss function (binary cross-entropy)\n        loss_fn = nn.BCELoss(reduction='none')\n        reg_l1, reg_l2 = self.hyperparameters.get(\n            \"convolutional_kernel_l1_l2\",\n            [0.0, 0.0],\n        )\n        regularization_parameters = tuple(self._regularized_parameters(network))\n\n        # Validation split\n        val_split = self.hyperparameters[\"validation_split\"]\n        n_total = len(targets)\n        n_val = int(n_total * val_split)\n        n_train = n_total - n_val\n\n        indices = numpy.arange(n_total)\n        train_indices = indices[:n_train]\n        val_indices = indices[n_train:]\n\n        last_progress_print = None\n        min_val_loss_iteration = None\n        min_val_loss = None\n        start = time.time()\n\n        for epoch in range(self.hyperparameters[\"max_epochs\"]):\n            epoch_start = time.time()\n            network.train()\n\n            # Shuffle training indices each epoch\n            numpy.random.shuffle(train_indices)\n\n            batch_size = self.hyperparameters[\"minibatch_size\"]\n            train_losses = []\n\n            for batch_start in range(0, n_train, batch_size):\n                batch_idx = train_indices[batch_start:batch_start + batch_size]\n\n                seq_batch = torch.from_numpy(x_dict[\"sequence\"][batch_idx]).float().to(device)\n                length_batch = torch.from_numpy(x_dict[\"peptide_length\"][batch_idx]).to(device)\n                target_batch = torch.from_numpy(targets[batch_idx].astype(numpy.float32)).to(device)\n\n                inputs = {\"sequence\": seq_batch, \"peptide_length\": length_batch}\n\n                optimizer.zero_grad()\n                predictions = network(inputs)\n                loss = loss_fn(predictions, target_batch)\n\n                if sample_weights is not None:\n                    weight_batch = torch.from_numpy(\n                        sample_weights[batch_idx].astype(numpy.float32)\n                    ).to(device)\n                    loss = loss * weight_batch\n\n                loss = loss.mean()\n                regularization_penalty = self._regularization_penalty(\n                    regularization_parameters,\n                    l1=reg_l1,\n                    l2=reg_l2,\n                )\n                if regularization_penalty is not None:\n                    loss = loss + regularization_penalty\n                loss.backward()\n                optimizer.step()\n              
  train_losses.append(loss.item())\n\n            epoch_time = time.time() - epoch_start\n            train_loss = numpy.mean(train_losses)\n            fit_info[\"loss\"].append(train_loss)\n\n            # Validation\n            if val_split > 0:\n                network.eval()\n                with torch.no_grad():\n                    val_seq = torch.from_numpy(x_dict[\"sequence\"][val_indices]).float().to(device)\n                    val_length = torch.from_numpy(x_dict[\"peptide_length\"][val_indices]).to(device)\n                    val_targets = torch.from_numpy(targets[val_indices].astype(numpy.float32)).to(device)\n\n                    val_inputs = {\"sequence\": val_seq, \"peptide_length\": val_length}\n                    val_predictions = network(val_inputs)\n                    val_loss = loss_fn(val_predictions, val_targets)\n                    if sample_weights is not None:\n                        val_weights = torch.from_numpy(\n                            sample_weights[val_indices].astype(numpy.float32)\n                        ).to(device)\n                        val_loss = val_loss * val_weights\n                    val_loss = val_loss.mean()\n                    regularization_penalty = self._regularization_penalty(\n                        regularization_parameters,\n                        l1=reg_l1,\n                        l2=reg_l2,\n                    )\n                    if regularization_penalty is not None:\n                        val_loss = val_loss + regularization_penalty\n                    val_loss = val_loss.item()\n                fit_info[\"val_loss\"].append(val_loss)\n\n            gc.collect()\n\n            # Progress printing\n            if progress_print_interval is not None and (\n                not last_progress_print\n                or (time.time() - last_progress_print > progress_print_interval)\n            ):\n                print(\n                    (\n                        progress_preamble\n                        + \" \"\n                        + \"Epoch %3d / %3d [%0.2f sec]: loss=%g. \"\n                        \"Min val loss (%s) at epoch %s\"\n                        % (\n                            epoch,\n                            self.hyperparameters[\"max_epochs\"],\n                            epoch_time,\n                            fit_info[\"loss\"][-1],\n                            str(min_val_loss),\n                            min_val_loss_iteration,\n                        )\n                    ).strip()\n                )\n                last_progress_print = time.time()\n\n            if val_split > 0:\n                if min_val_loss is None or (\n                    val_loss < min_val_loss - self.hyperparameters[\"min_delta\"]\n                ):\n                    min_val_loss = val_loss\n                    min_val_loss_iteration = epoch\n\n                if self.hyperparameters[\"early_stopping\"]:\n                    threshold = (\n                        min_val_loss_iteration + self.hyperparameters[\"patience\"]\n                    )\n                    if epoch > threshold:\n                        if progress_print_interval is not None:\n                            print(\n                                (\n                                    progress_preamble\n                                    + \" \"\n                                    + \"Stopping at epoch %3d / %3d: loss=%g. 
\"\n                                    \"Min val loss (%g) at epoch %s\"\n                                    % (\n                                        epoch,\n                                        self.hyperparameters[\"max_epochs\"],\n                                        fit_info[\"loss\"][-1],\n                                        (\n                                            min_val_loss\n                                            if min_val_loss is not None\n                                            else numpy.nan\n                                        ),\n                                        min_val_loss_iteration,\n                                    )\n                                ).strip()\n                            )\n                        break\n\n            if progress_callback:\n                progress_callback()\n\n        fit_info[\"time\"] = time.time() - start\n        fit_info[\"num_points\"] = len(sequences.dataframe)\n        self.fit_info.append(dict(fit_info))\n\n        if verbose > -1:\n            print(\"Output weights\", self.network().output_layer.weight.data.cpu().numpy())\n\n    def _create_optimizer(self, network):\n        \"\"\"Create an optimizer for the network.\"\"\"\n        optimizer_name = self.hyperparameters[\"optimizer\"].lower()\n        lr = (\n            self.hyperparameters[\"learning_rate\"]\n            if self.hyperparameters[\"learning_rate\"] is not None\n            else 0.001\n        )\n\n        if optimizer_name == \"adam\":\n            # Match Keras default epsilon=1e-07.\n            return torch.optim.Adam(network.parameters(), lr=lr, eps=1e-07)\n        elif optimizer_name == \"rmsprop\":\n            # Match Keras defaults: rho=0.9, epsilon=1e-07.\n            return torch.optim.RMSprop(network.parameters(), lr=lr, alpha=0.9, eps=1e-07)\n        elif optimizer_name == \"sgd\":\n            return torch.optim.SGD(network.parameters(), lr=lr)\n        else:\n            return torch.optim.Adam(network.parameters(), lr=lr, eps=1e-07)\n\n    def predict(\n        self,\n        peptides,\n        n_flanks=None,\n        c_flanks=None,\n        batch_size=DEFAULT_PREDICT_BATCH_SIZE,\n    ):\n        \"\"\"\n        Predict antigen processing.\n\n        Parameters\n        ----------\n        peptides : list of string\n            Peptide sequences\n        n_flanks : list of string\n            Upstream sequence before each peptide\n        c_flanks : list of string\n            Downstream sequence after each peptide\n        batch_size : int\n            Prediction batch size.\n\n        Returns\n        -------\n        numpy.array\n\n        Processing scores. 
Range is 0-1, higher indicates more favorable\n        processing.\n        \"\"\"\n        if n_flanks is None:\n            n_flanks = [\"\"] * len(peptides)\n        if c_flanks is None:\n            c_flanks = [\"\"] * len(peptides)\n\n        sequences = FlankingEncoding(\n            peptides=peptides, n_flanks=n_flanks, c_flanks=c_flanks\n        )\n        return self.predict_encoded(sequences=sequences, batch_size=batch_size)\n\n    def predict_encoded(\n        self, sequences, throw=True, batch_size=DEFAULT_PREDICT_BATCH_SIZE\n    ):\n        \"\"\"\n        Predict antigen processing.\n\n        Parameters\n        ----------\n        sequences : FlankingEncoding\n            Peptides and flanking sequences\n        throw : boolean\n            Whether to throw exception on unsupported peptides\n        batch_size : int\n            Prediction batch size.\n\n        Returns\n        -------\n        numpy.array\n        \"\"\"\n        device = self.get_device()\n\n        x_dict = self.network_input(sequences, throw=throw)\n        network = self.network()\n        network.to(device)\n        network.eval()\n\n        n_samples = len(x_dict[\"sequence\"])\n        all_predictions = []\n\n        def prediction_tensor(batch_array):\n            batch_array = numpy.asarray(batch_array)\n            if not batch_array.flags.writeable:\n                batch_array = batch_array.copy()\n            return torch.from_numpy(batch_array).to(device)\n\n        with torch.no_grad():\n            for batch_start in range(0, n_samples, batch_size):\n                batch_end = min(batch_start + batch_size, n_samples)\n\n                seq_batch = prediction_tensor(\n                    x_dict[\"sequence\"][batch_start:batch_end]\n                ).float()\n                length_batch = prediction_tensor(\n                    x_dict[\"peptide_length\"][batch_start:batch_end]\n                )\n\n                inputs = {\"sequence\": seq_batch, \"peptide_length\": length_batch}\n                batch_predictions = network(inputs)\n                all_predictions.append(batch_predictions.cpu().numpy())\n\n        raw_predictions = numpy.concatenate(all_predictions, axis=0)\n        predictions = numpy.array(raw_predictions, dtype=\"float64\")\n        return predictions\n\n    def network_input(self, sequences, throw=True):\n        \"\"\"\n        Encode peptides to the fixed-length encoding expected by the neural\n        network (which depends on the architecture).\n\n        Parameters\n        ----------\n        sequences : FlankingEncoding\n            Peptides and flanking sequences\n        throw : boolean\n            Whether to throw exception on unsupported peptides\n\n        Returns\n        -------\n        dict\n        \"\"\"\n        encoded = sequences.vector_encode(\n            self.hyperparameters[\"amino_acid_encoding\"],\n            self.hyperparameters[\"peptide_max_length\"],\n            n_flank_length=self.hyperparameters[\"n_flank_length\"],\n            c_flank_length=self.hyperparameters[\"c_flank_length\"],\n            allow_unsupported_amino_acids=True,\n            throw=throw,\n        )\n\n        result = {\n            \"sequence\": encoded.array,\n            \"peptide_length\": encoded.peptide_lengths,\n        }\n        return result\n\n    def make_network(\n        self,\n        amino_acid_encoding,\n        peptide_max_length,\n        n_flank_length,\n        c_flank_length,\n        flanking_averages,\n        convolutional_filters,\n    
    convolutional_kernel_size,\n        convolutional_activation,\n        convolutional_kernel_l1_l2,\n        dropout_rate,\n        post_convolutional_dense_layer_sizes,\n    ):\n        \"\"\"\n        Helper function to make a PyTorch network given hyperparameters.\n        \"\"\"\n        empty_x_dict = self.network_input(FlankingEncoding([], [], []))\n        sequence_dims = empty_x_dict[\"sequence\"].shape[1:]\n\n        numpy.testing.assert_equal(\n            sequence_dims[0], peptide_max_length + n_flank_length + c_flank_length\n        )\n\n        return Class1ProcessingModel(\n            sequence_dims=sequence_dims,\n            n_flank_length=n_flank_length,\n            c_flank_length=c_flank_length,\n            peptide_max_length=peptide_max_length,\n            flanking_averages=flanking_averages,\n            convolutional_filters=convolutional_filters,\n            convolutional_kernel_size=convolutional_kernel_size,\n            convolutional_activation=convolutional_activation,\n            convolutional_kernel_l1_l2=convolutional_kernel_l1_l2,\n            dropout_rate=dropout_rate,\n            post_convolutional_dense_layer_sizes=post_convolutional_dense_layer_sizes,\n        )\n\n    def __getstate__(self):\n        \"\"\"\n        serialize to a dict. Model weights are included. For pickle support.\n\n        Returns\n        -------\n        dict\n\n        \"\"\"\n        self.update_network_description()\n        result = dict(self.__dict__)\n        result[\"_network\"] = None\n        return result\n\n    def __setstate__(self, state):\n        \"\"\"\n        Deserialize. For pickle support.\n        \"\"\"\n        self.__dict__.update(state)\n\n    def get_weights(self):\n        \"\"\"\n        Get the network weights\n\n        Returns\n        -------\n        list of numpy.array giving weights for each layer or None if there is no\n        network\n        \"\"\"\n        self.update_network_description()\n        return self.network_weights\n\n    def get_config(self):\n        \"\"\"\n        serialize to a dict all attributes except model weights\n\n        Returns\n        -------\n        dict\n        \"\"\"\n        self.update_network_description()\n        result = dict(self.__dict__)\n        del result[\"_network\"]\n        result[\"network_weights\"] = None\n        return result\n\n    @classmethod\n    def from_config(cls, config, weights=None):\n        \"\"\"\n        deserialize from a dict returned by get_config().\n\n        Parameters\n        ----------\n        config : dict\n        weights : list of array, optional\n            Network weights to restore\n\n        Returns\n        -------\n        Class1ProcessingNeuralNetwork\n        \"\"\"\n        config = dict(config)\n        instance = cls(**config.pop(\"hyperparameters\"))\n        instance.__dict__.update(config)\n        instance.network_weights = weights\n        assert instance._network is None\n        return instance\n"
  },
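  {
    "path": "examples/round_trip_processing_network.py",
    "content": "\"\"\"\nHypothetical sketch, not part of the original package: round-trips a\ntrained Class1ProcessingNeuralNetwork through get_config() / get_weights() /\nfrom_config(), per the docstrings in\nmhcflurry/class1_processing_neural_network.py. Assumes `model` is an\nalready-fit instance and that the lazily built network restores\nnetwork_weights, as from_config() implies.\n\"\"\"\nimport numpy\n\nfrom mhcflurry.class1_processing_neural_network import (\n    Class1ProcessingNeuralNetwork)\n\n\ndef round_trip(model, peptides, n_flanks, c_flanks):\n    # get_config() captures all attributes except the network and its\n    # weights; get_weights() returns one numpy array per layer.\n    config = model.get_config()\n    weights = model.get_weights()\n\n    restored = Class1ProcessingNeuralNetwork.from_config(\n        config, weights=weights)\n\n    # Predictions before and after the round trip should agree.\n    before = model.predict(peptides, n_flanks=n_flanks, c_flanks=c_flanks)\n    after = restored.predict(peptides, n_flanks=n_flanks, c_flanks=c_flanks)\n    numpy.testing.assert_allclose(before, after, atol=1e-5)\n    return restored\n"
  },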
  {
    "path": "mhcflurry/class1_processing_predictor.py",
    "content": "from os.path import join, exists, abspath\nfrom os import mkdir\nfrom socket import gethostname\nfrom getpass import getuser\n\nimport time\nimport json\nimport hashlib\nimport logging\nimport collections\n\nimport numpy\nimport pandas\n\nfrom .version import __version__\nfrom .class1_neural_network import DEFAULT_PREDICT_BATCH_SIZE\nfrom .flanking_encoding import FlankingEncoding\nfrom .downloads import get_default_class1_processing_models_dir\nfrom .class1_processing_neural_network import Class1ProcessingNeuralNetwork\nfrom .common import save_weights, load_weights, NumpyJSONEncoder\n\n\nclass Class1ProcessingPredictor(object):\n    \"\"\"\n    User-facing interface to antigen processing prediction.\n\n    Delegates to an ensemble of Class1ProcessingNeuralNetwork instances.\n    \"\"\"\n    def __init__(\n            self,\n            models,\n            manifest_df=None,\n            metadata_dataframes=None,\n            provenance_string=None):\n        \"\"\"\n        Instantiate a new Class1ProcessingPredictor\n\n        Users will generally call load() to restore a saved predictor rather\n        than using this constructor.\n\n        Parameters\n        ----------\n        models : list of Class1ProcessingNeuralNetwork\n            Neural networks in the ensemble.\n        manifest_df : pandas.DataFrame\n            Manifest dataframe. If not specified a new one will be created when\n            needed.\n        metadata_dataframes : dict of string -> pandas.DataFrame\n            Arbitrary metadata associated with this predictor\n        provenance_string : string, optional\n            Optional info string to use in __str__.\n        \"\"\"\n        self.models = models\n        self._manifest_df = manifest_df\n        self.metadata_dataframes = (\n            dict(metadata_dataframes) if metadata_dataframes else {})\n        self.provenance_string = provenance_string\n\n    @property\n    def sequence_lengths(self):\n        \"\"\"\n        Supported maximum sequence lengths.\n\n        Passing a peptide greater than the maximum supported length results\n        in an error.\n\n        Passing an N- or C-flank sequence greater than the maximum supported\n        length results in some part of it being ignored.\n\n        Returns\n        -------\n        dict of string -> int\n\n        Keys are \"peptide\", \"n_flank\", \"c_flank\". 
Values give the maximum\n        supported sequence length.\n        \"\"\"\n        df = pandas.DataFrame([model.sequence_lengths for model in self.models])\n        return {\n            \"peptide\": df.peptide.min(),  # min: anything greater is error\n            \"n_flank\": df.n_flank.max(),  # max: anything greater is ignored\n            \"c_flank\": df.c_flank.max(),\n        }\n\n    def add_models(self, models):\n        \"\"\"\n        Add models to the ensemble (in-place).\n\n        Parameters\n        ----------\n        models : list of Class1ProcessingNeuralNetwork\n\n        Returns\n        -------\n        list of string\n\n        Names of the new models.\n        \"\"\"\n        new_model_names = []\n        original_manifest = self.manifest_df\n        new_manifest_rows = []\n        for model in models:\n            model_name = self.model_name(len(self.models))\n            row = pandas.Series(collections.OrderedDict([\n                (\"model_name\", model_name),\n                (\"config_json\", json.dumps(\n                    model.get_config(), cls=NumpyJSONEncoder)),\n                (\"model\", model),\n            ])).to_frame().T\n            new_manifest_rows.append(row)\n            self.models.append(model)\n            new_model_names.append(model_name)\n\n        self._manifest_df = pandas.concat(\n            [original_manifest] + new_manifest_rows,\n            ignore_index=True)\n\n        self.check_consistency()\n        return new_model_names\n\n\n    @property\n    def manifest_df(self):\n        \"\"\"\n        A pandas.DataFrame describing the models included in this predictor.\n\n        Returns\n        -------\n        pandas.DataFrame\n        \"\"\"\n        if self._manifest_df is None:\n            rows = []\n            for (i, model) in enumerate(self.models):\n                model_config = model.get_config()\n                rows.append((\n                    self.model_name(i),\n                    json.dumps(model_config, cls=NumpyJSONEncoder),\n                    model\n                ))\n            self._manifest_df = pandas.DataFrame(\n                rows,\n                columns=[\"model_name\", \"config_json\", \"model\"])\n        return self._manifest_df\n\n    @staticmethod\n    def model_name(num):\n        \"\"\"\n        Generate a model name\n\n        Returns\n        -------\n        string\n\n        \"\"\"\n        random_string = hashlib.sha1(\n            str(time.time()).encode()).hexdigest()[:16]\n        return \"CLEAVAGE-CLASSI-%d-%s\" % (\n            num,\n            random_string)\n\n    @staticmethod\n    def weights_path(models_dir, model_name):\n        \"\"\"\n        Generate the path to the weights file for a model\n\n        Parameters\n        ----------\n        models_dir : string\n        model_name : string\n\n        Returns\n        -------\n        string\n        \"\"\"\n        return join(models_dir, \"weights_%s.npz\" % model_name)\n\n    def predict(\n            self,\n            peptides,\n            n_flanks=None,\n            c_flanks=None,\n            throw=True,\n            batch_size=DEFAULT_PREDICT_BATCH_SIZE):\n        \"\"\"\n        Predict antigen processing.\n\n        Parameters\n        ----------\n        peptides : list of string\n            Peptide sequences\n        n_flanks : list of string\n            Upstream sequence before each peptide\n        c_flanks : list of string\n            Downstream sequence after each peptide\n        throw : boolean\n   
         If True, a ValueError will be raised in the case of unsupported\n            peptides. If False, a warning will be logged and the predictions\n            for those peptides will be NaN.\n        batch_size : int\n            Prediction batch size.\n\n        Returns\n        -------\n        numpy.array\n\n        Processing scores. Range is 0-1, higher indicates more favorable\n        processing.\n        \"\"\"\n        return self.predict_to_dataframe(\n            peptides=peptides,\n            n_flanks=n_flanks,\n            c_flanks=c_flanks,\n            throw=throw,\n            batch_size=batch_size).score.values\n\n    def predict_to_dataframe(\n            self,\n            peptides,\n            n_flanks=None,\n            c_flanks=None,\n            throw=True,\n            batch_size=DEFAULT_PREDICT_BATCH_SIZE):\n        \"\"\"\n        Predict antigen processing.\n\n        See `predict` method for parameter descriptions.\n\n        Returns\n        -------\n        pandas.DataFrame\n\n        Processing predictions are in the \"score\" column. Also includes\n        peptides and flanking sequences.\n        \"\"\"\n\n        if n_flanks is None:\n            n_flanks = [\"\"] * len(peptides)\n        if c_flanks is None:\n            c_flanks = [\"\"] * len(peptides)\n\n        sequences = FlankingEncoding(\n            peptides=peptides, n_flanks=n_flanks, c_flanks=c_flanks)\n        return self.predict_to_dataframe_encoded(\n            sequences=sequences, throw=throw, batch_size=batch_size)\n\n    def predict_to_dataframe_encoded(\n            self, sequences, throw=True, batch_size=DEFAULT_PREDICT_BATCH_SIZE):\n        \"\"\"\n        Predict antigen processing.\n\n        See `predict` method for more information.\n\n        Parameters\n        ----------\n        sequences : FlankingEncoding\n        batch_size : int\n        throw : boolean\n\n        Returns\n        -------\n        pandas.DataFrame\n        \"\"\"\n\n        score_array = []\n\n        for (i, network) in enumerate(self.models):\n            predictions = network.predict_encoded(\n                sequences, throw=throw, batch_size=batch_size)\n            score_array.append(predictions)\n\n        score_array = numpy.array(score_array)\n\n        result_df = pandas.DataFrame({\n            \"peptide\": sequences.dataframe.peptide,\n            \"n_flank\": sequences.dataframe.n_flank,\n            \"c_flank\": sequences.dataframe.c_flank,\n            \"score\": numpy.mean(score_array, axis=0),\n        })\n        return result_df\n\n    def check_consistency(self):\n        \"\"\"\n        Verify that self.manifest_df is consistent with instance variables.\n\n        Currently only checks for agreement on the total number of models.\n\n        Throws AssertionError if inconsistent.\n        \"\"\"\n        assert len(self.manifest_df) == len(self.models), (\n            \"Manifest seems out of sync with models: %d vs %d entries: \\n%s\"% (\n                len(self.manifest_df),\n                len(self.models),\n                str(self.manifest_df)))\n\n    def save(self, models_dir, model_names_to_write=None, write_metadata=True):\n        \"\"\"\n        Serialize the predictor to a directory on disk. 
If the directory does\n        not exist it will be created.\n\n        The serialization format consists of a file called \"manifest.csv\" with\n        the configurations of each Class1ProcessingNeuralNetwork, along with\n        per-network files giving the model weights.\n\n        Parameters\n        ----------\n        models_dir : string\n            Path to directory. It will be created if it doesn't exist.\n        \"\"\"\n        self.check_consistency()\n\n        if model_names_to_write is None:\n            # Write all models\n            model_names_to_write = self.manifest_df.model_name.values\n\n        if not exists(models_dir):\n            mkdir(models_dir)\n\n        sub_manifest_df = self.manifest_df.loc[\n            self.manifest_df.model_name.isin(model_names_to_write)\n        ].copy()\n\n        # Network JSON configs may have changed since the models were added,\n        # so we update the JSON configs here also.\n        updated_network_config_jsons = []\n        for (_, row) in sub_manifest_df.iterrows():\n            updated_network_config_jsons.append(\n                json.dumps(row.model.get_config(), cls=NumpyJSONEncoder))\n            weights_path = self.weights_path(models_dir, row.model_name)\n            save_weights(row.model.get_weights(), weights_path)\n            logging.info(\"Wrote: %s\", weights_path)\n        sub_manifest_df[\"config_json\"] = updated_network_config_jsons\n        self.manifest_df.loc[\n            sub_manifest_df.index,\n            \"config_json\"\n        ] = updated_network_config_jsons\n\n        write_manifest_df = self.manifest_df[[\n            c for c in self.manifest_df.columns if c != \"model\"\n        ]]\n        manifest_path = join(models_dir, \"manifest.csv\")\n        write_manifest_df.to_csv(manifest_path, index=False)\n        logging.info(\"Wrote: %s\", manifest_path)\n\n        if write_metadata:\n            # Write \"info.txt\"\n            info_path = join(models_dir, \"info.txt\")\n            rows = [\n                (\"trained on\", time.asctime()),\n                (\"package   \", \"mhcflurry %s\" % __version__),\n                (\"hostname  \", gethostname()),\n                (\"user      \", getuser()),\n            ]\n            pandas.DataFrame(rows).to_csv(\n                info_path, sep=\"\\t\", header=False, index=False)\n\n            if self.metadata_dataframes:\n                for (name, df) in self.metadata_dataframes.items():\n                    metadata_df_path = join(models_dir, \"%s.csv.bz2\" % name)\n                    df.to_csv(metadata_df_path, index=False, compression=\"bz2\")\n\n    @classmethod\n    def load(cls, models_dir=None, max_models=None):\n        \"\"\"\n        Deserialize a predictor from a directory on disk.\n\n        Parameters\n        ----------\n        models_dir : string\n            Path to directory. 
If unspecified the default downloaded models are\n            used.\n\n        max_models : int, optional\n            Maximum number of models to load\n\n        Returns\n        -------\n        `Class1ProcessingPredictor` instance\n        \"\"\"\n        if models_dir is None:\n            models_dir = get_default_class1_processing_models_dir()\n\n        manifest_path = join(models_dir, \"manifest.csv\")\n        manifest_df = pandas.read_csv(manifest_path, nrows=max_models)\n\n        models = []\n        for (_, row) in manifest_df.iterrows():\n            weights_filename = cls.weights_path(models_dir, row.model_name)\n            config = json.loads(row.config_json)\n            model = Class1ProcessingNeuralNetwork.from_config(\n                config,\n                weights=load_weights(abspath(weights_filename)))\n            models.append(model)\n\n        manifest_df[\"model\"] = models\n\n        logging.info(\"Loaded %d class1 processing models\", len(models))\n\n        provenance_string = None\n        try:\n            info_path = join(models_dir, \"info.txt\")\n            info = pandas.read_csv(\n                info_path, sep=\"\\t\", header=None, index_col=0).iloc[\n                :, 0\n            ].to_dict()\n            provenance_string = \"generated on %s\" % info[\"trained on\"]\n        except OSError:\n            pass\n\n        result = cls(\n            models=models,\n            manifest_df=manifest_df,\n            provenance_string=provenance_string)\n        return result\n\n    def __repr__(self):\n        pieces = [\"at 0x%0x\" % id(self), \"[mhcflurry %s]\" % __version__]\n        if self.provenance_string:\n            pieces.append(self.provenance_string)\n        return \"<Class1ProcessingPredictor %s>\" % \" \".join(pieces)\n"
  },
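  {
    "path": "examples/processing_predictor_usage.py",
    "content": "\"\"\"\nHypothetical usage sketch, not part of the original package, for\nClass1ProcessingPredictor (see mhcflurry/class1_processing_predictor.py).\nAssumes the default processing models have already been downloaded;\notherwise pass an explicit models_dir to load(). The peptide and flank\nstrings are arbitrary examples.\n\"\"\"\nfrom mhcflurry.class1_processing_predictor import Class1ProcessingPredictor\n\n# load() with no arguments uses the default downloaded models directory.\npredictor = Class1ProcessingPredictor.load()\n\npeptides = [\"SIINFEKL\", \"SIINFEKD\"]\nn_flanks = [\"QLE\", \"QLE\"]  # upstream sequence before each peptide\nc_flanks = [\"SNL\", \"SNL\"]  # downstream sequence after each peptide\n\n# predict() returns a numpy array of processing scores in [0, 1];\n# higher indicates more favorable processing.\nscores = predictor.predict(peptides, n_flanks=n_flanks, c_flanks=c_flanks)\n\n# predict_to_dataframe() returns the same scores as a pandas DataFrame\n# with \"peptide\", \"n_flank\", \"c_flank\", and \"score\" columns.\ndf = predictor.predict_to_dataframe(\n    peptides, n_flanks=n_flanks, c_flanks=c_flanks)\nprint(df)\n"
  },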
  {
    "path": "mhcflurry/cluster_parallelism.py",
    "content": "\"\"\"\nSimple, relatively naive parallel map implementation for HPC clusters.\n\nUsed for training MHCflurry models.\n\"\"\"\nimport traceback\nimport sys\nimport os\nimport time\nimport signal\nimport argparse\nimport pickle\nimport subprocess\nimport shutil\n\nfrom .local_parallelism import call_wrapped_kwargs\nfrom .class1_affinity_predictor import Class1AffinityPredictor\n\ntry:\n    from shlex import quote\nexcept ImportError:\n    from pipes import quote\n\n\ndef add_cluster_parallelism_args(parser):\n    \"\"\"\n    Add commandline arguments controlling cluster parallelism to an argparse\n    ArgumentParser.\n\n    Parameters\n    ----------\n    parser : argparse.ArgumentParser\n    \"\"\"\n    group = parser.add_argument_group(\"Cluster parallelism\")\n    group.add_argument(\n        \"--cluster-parallelism\",\n        default=False,\n        action=\"store_true\")\n    group.add_argument(\n        \"--cluster-submit-command\",\n        default='sh',\n        help=\"Default: %(default)s\")\n    group.add_argument(\n        \"--cluster-results-workdir\",\n        default='./cluster-workdir',\n        help=\"Default: %(default)s\")\n    group.add_argument(\n        \"--additional-complete-file\",\n        default='STDERR',\n        help=\"Additional file to monitor for job completion. Default: %(default)s\")\n    group.add_argument(\n        '--cluster-script-prefix-path',\n        help=\"\",\n    )\n    group.add_argument(\n        '--cluster-max-retries',\n        type=int,\n        help=\"How many times to rerun failing jobs. Default: %(default)s\",\n        default=3)\n\n\ndef cluster_results_from_args(\n        args,\n        work_function,\n        work_items,\n        constant_data=None,\n        input_serialization_method=\"pickle\",\n        result_serialization_method=\"pickle\",\n        clear_constant_data=False):\n    \"\"\"\n    Parallel map configurable using commandline arguments. 
See the\n    cluster_results() function for docs.\n\n    The `args` parameter should be an argparse.Namespace from an argparse parser\n    generated using the add_cluster_parallelism_args() function.\n\n\n    Parameters\n    ----------\n    args\n    work_function\n    work_items\n    constant_data\n    result_serialization_method\n    clear_constant_data\n\n    Returns\n    -------\n    generator\n    \"\"\"\n    return cluster_results(\n        work_function=work_function,\n        work_items=work_items,\n        constant_data=constant_data,\n        submit_command=args.cluster_submit_command,\n        results_workdir=args.cluster_results_workdir,\n        additional_complete_file=args.additional_complete_file,\n        script_prefix_path=args.cluster_script_prefix_path,\n        input_serialization_method=input_serialization_method,\n        result_serialization_method=result_serialization_method,\n        max_retries=args.cluster_max_retries,\n        clear_constant_data=clear_constant_data\n    )\n\n\ndef cluster_results(\n        work_function,\n        work_items,\n        constant_data=None,\n        submit_command=\"sh\",\n        results_workdir=\"./cluster-workdir\",\n        additional_complete_file=None,\n        script_prefix_path=None,\n        input_serialization_method=\"pickle\",\n        result_serialization_method=\"pickle\",\n        max_retries=3,\n        clear_constant_data=False):\n    \"\"\"\n    Parallel map on an HPC cluster.\n\n    Returns [work_function(item) for item in work_items] where each invocation\n    of work_function is performed as a separate HPC cluster job. Order is\n    preserved.\n\n    Optionally, \"constant data\" can be specified, which will be passed to\n    each work_function() invocation as a keyword argument called constant_data.\n    This data is serialized once and all workers read it from the same source,\n    which is more efficient than serializing it separately for each worker.\n\n    Each worker's input is serialized to a shared NFS directory and the\n    submit_command is used to launch a job to process that input. The shared\n    filesystem is polled occasionally to watch for results, which are fed back\n    to the user.\n\n    Parameters\n    ----------\n    work_function : A -> B\n    work_items : list of A\n    constant_data : object\n    submit_command : string\n        For running on LSF, we use \"bsub\" here.\n    results_workdir : string\n        Path to NFS shared directory where inputs and results can be written\n    script_prefix_path : string\n        Path to script that will be invoked to run each worker. 
A line calling\n        the _mhcflurry-cluster-worker-entry-point command will be appended to\n        the contents of this file.\n    result_serialization_method : string, one of \"pickle\" or \"save_predictor\"\n        The \"save_predictor\" works only when the return type of work_function\n        is Class1AffinityPredictor\n    max_retries : int\n        How many times to attempt to re-launch a failed worker\n    clear_constant_data : bool\n        If True, the constant data dict is cleared on the launching host after\n        it is serialized to disk.\n\n    Returns\n    -------\n    generator of B\n    \"\"\"\n\n    if input_serialization_method == \"dill\":\n        import dill\n        input_serialization_module = dill\n    else:\n        assert input_serialization_method == \"pickle\"\n        input_serialization_module = pickle\n\n    constant_payload = {\n        'constant_data': constant_data,\n        'function': work_function,\n    }\n    if not os.path.exists(results_workdir):\n        os.mkdir(results_workdir)\n\n    work_dir = os.path.join(\n        os.path.abspath(results_workdir),\n        str(int(time.time())))\n    os.mkdir(work_dir)\n\n    constant_payload_path = os.path.join(\n        work_dir,\n        \"global_data.\" + input_serialization_method)\n    with open(constant_payload_path, \"wb\") as fd:\n        input_serialization_module.dump(\n            constant_payload,\n            fd,\n            protocol=input_serialization_module.HIGHEST_PROTOCOL)\n    print(\"Wrote:\", constant_payload_path)\n    if clear_constant_data:\n        constant_data.clear()\n        print(\"Cleared constant data to free up memory.\")\n\n    if script_prefix_path:\n        with open(script_prefix_path) as fd:\n            script_prefix = fd.read()\n    else:\n        script_prefix = \"#!/bin/bash\"\n\n    result_items = []\n\n    for (i, item) in enumerate(work_items):\n        item_workdir = os.path.join(\n            work_dir, \"work-item.%03d-of-%03d\" % (i, len(work_items)))\n        os.mkdir(item_workdir)\n\n        item_data_path = os.path.join(\n            item_workdir, \"data.\" + input_serialization_method)\n        with open(item_data_path, \"wb\") as fd:\n            input_serialization_module.dump(\n                item, fd, protocol=input_serialization_module.HIGHEST_PROTOCOL)\n        print(\"Wrote:\", item_data_path)\n\n        item_result_path = os.path.join(item_workdir, \"result\")\n        item_error_path = os.path.join(item_workdir, \"error.pkl\")\n        item_finished_path = os.path.join(item_workdir, \"COMPLETE\")\n\n        item_script_pieces = [\n            script_prefix.format(work_item_num=i, work_dir=item_workdir)\n        ]\n        worker_command = os.environ.get(\n            \"MHCFLURRY_CLUSTER_WORKER_COMMAND\",\n            \"_mhcflurry-cluster-worker-entry-point\"\n        )\n        item_script_pieces.append(\" \".join([\n            worker_command,\n            \"--constant-data\", quote(constant_payload_path),\n            \"--worker-data\", quote(item_data_path),\n            \"--result-out\", quote(item_result_path),\n            \"--error-out\", quote(item_error_path),\n            \"--complete-dir\", quote(item_finished_path),\n            \"--input-serialization-method\", input_serialization_method,\n            \"--result-serialization-method\", result_serialization_method,\n        ]))\n        item_script = \"\\n\".join(item_script_pieces)\n        item_script_path = os.path.join(\n            item_workdir,\n            \"run.%d.sh\" % 
i)\n        with open(item_script_path, \"w\") as fd:\n            fd.write(item_script)\n        print(\"Wrote:\", item_script_path)\n\n        launch_command = \" \".join([\n            submit_command, \"<\", quote(item_script_path)\n        ])\n        subprocess.check_call(launch_command, shell=True)\n        print(\"Invoked\", launch_command)\n\n        result_items.append({\n            'work_dir': item_workdir,\n            'finished_path': item_finished_path,\n            'result_path': item_result_path,\n            'error_path': item_error_path,\n            'retry_num': 0,\n            'launch_command': launch_command,\n        })\n\n    def result_generator():\n        additional_complete_file_path = None\n        start = time.time()\n        while result_items:\n            print(\"[%0.1f sec elapsed] waiting on %d / %d items.\" % (\n                time.time() - start, len(result_items), len(work_items)))\n            while True:\n                result_item = None\n                for d in result_items:\n                    if additional_complete_file:\n                        additional_complete_file_path = os.path.join(\n                            d['work_dir'], additional_complete_file)\n                    if os.path.exists(d['finished_path']):\n                        result_item = d\n                        break\n                    if additional_complete_file and os.path.exists(\n                            additional_complete_file_path):\n                        result_item = d\n                        print(\"Exists\", additional_complete_file_path)\n                        break\n\n                if result_item is None:\n                    time.sleep(60)\n                else:\n                    result_items.remove(result_item)\n                    break\n\n            complete_dir = result_item['finished_path']\n            result_path = result_item['result_path']\n            error_path = result_item['error_path']\n            retry_num = result_item['retry_num']\n            launch_command = result_item['launch_command']\n\n            print(\"[%0.1f sec elapsed] processing item %s\" % (\n                time.time() - start, result_item))\n\n            if os.path.exists(error_path) or not os.path.exists(result_path):\n                if os.path.exists(error_path):\n                    print(\"Error path exists\", error_path)\n                    try:\n                        with open(error_path, \"rb\") as fd:\n                            exception = pickle.load(fd)\n                            print(exception)\n                    except Exception as e:\n                        exception = RuntimeError(\n                            \"Error, but couldn't read error path: %s %s\" % (\n                                type(e), str(e)))\n                else:\n                    exception = RuntimeError(\"Error, but no exception saved\")\n                if not os.path.exists(result_path):\n                    print(\"Result path does NOT exist\", result_path)\n\n                if retry_num < max_retries:\n                    print(\"Relaunching\", launch_command)\n                    attempt_dir = os.path.join(\n                        result_item['work_dir'], \"attempt.%d\" % retry_num)\n                    if os.path.exists(complete_dir):\n                        shutil.move(complete_dir, attempt_dir)  # directory\n                    if additional_complete_file and os.path.exists(\n                            additional_complete_file_path):\n           
             shutil.move(additional_complete_file_path, attempt_dir)\n                    if os.path.exists(error_path):\n                        shutil.move(error_path, attempt_dir)\n                    subprocess.check_call(launch_command, shell=True)\n                    print(\"Invoked\", launch_command)\n                    result_item['retry_num'] += 1\n                    result_items.append(result_item)\n                    continue\n                else:\n                    print(\"Max retries exceeded\", max_retries)\n                    raise exception\n\n            if os.path.exists(result_path):\n                print(\"Result path exists\", result_path)\n                if result_serialization_method == \"save_predictor\":\n                    result = Class1AffinityPredictor.load(result_path)\n                elif result_serialization_method == \"pickle\":\n                    with open(result_path, \"rb\") as fd:\n                        result = pickle.load(fd)\n                else:\n                    raise ValueError(\n                        \"Unsupported serialization method\",\n                        result_serialization_method)\n\n                yield result\n            else:\n                raise RuntimeError(\"Results do not exist\", result_path)\n\n    return result_generator()\n\n\nparser = argparse.ArgumentParser(\n    usage=\"Entry point for cluster workers\")\nparser.add_argument(\n    \"--constant-data\",\n    required=True,\n)\nparser.add_argument(\n    \"--worker-data\",\n    required=True,\n)\nparser.add_argument(\n    \"--result-out\",\n    required=True,\n)\nparser.add_argument(\n    \"--error-out\",\n    required=True,\n)\nparser.add_argument(\n    \"--complete-dir\",\n)\nparser.add_argument(\n    \"--input-serialization-method\",\n    choices=(\"pickle\", \"dill\"),\n    default=\"pickle\")\nparser.add_argument(\n    \"--result-serialization-method\",\n    choices=(\"pickle\", \"save_predictor\"),\n    default=\"pickle\")\n\n\ndef worker_entry_point(argv=sys.argv[1:]):\n    \"\"\"\n    Entry point for the worker command.\n\n    Parameters\n    ----------\n    argv : list of string\n    \"\"\"\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    if args.input_serialization_method == \"dill\":\n        import dill\n        input_serialization_module = dill\n    else:\n        assert args.input_serialization_method == \"pickle\"\n        input_serialization_module = pickle\n\n    with open(args.constant_data, \"rb\") as fd:\n        constant_payload = input_serialization_module.load(fd)\n\n    with open(args.worker_data, \"rb\") as fd:\n        worker_data = input_serialization_module.load(fd)\n\n    kwargs = dict(worker_data)\n    if constant_payload['constant_data'] is not None:\n        kwargs['constant_data'] = constant_payload['constant_data']\n\n    try:\n        result = call_wrapped_kwargs(constant_payload['function'], kwargs)\n        if args.result_serialization_method == 'save_predictor':\n            result.save(args.result_out)\n        else:\n            with open(args.result_out, \"wb\") as fd:\n                pickle.dump(result, fd, pickle.HIGHEST_PROTOCOL)\n        print(\"Wrote:\", args.result_out)\n    except Exception as e:\n        print(\"Exception: \", e)\n        with open(args.error_out, \"wb\") as fd:\n            pickle.dump(e, fd, 
pickle.HIGHEST_PROTOCOL)\n        print(\"Wrote:\", args.error_out)\n        raise\n    finally:\n        if args.complete_dir:\n            os.mkdir(args.complete_dir)\n            print(\"Created: \", args.complete_dir)\n"
  },
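  {
    "path": "examples/cluster_parallelism_sketch.py",
    "content": "\"\"\"\nHypothetical sketch, not part of the original package, of the\ncluster_results() parallel map in mhcflurry/cluster_parallelism.py.\nEach work item is a dict of keyword arguments: the worker calls\ncall_wrapped_kwargs(function, dict(worker_data)), with constant_data\nadded as an extra keyword argument when provided. The square() function\nand the numbers are illustrative only. Because the function is defined\nin __main__, this sketch requests input_serialization_method=\"dill\"\n(which serializes functions by value and requires the dill package);\nfunctions defined in an importable module can use plain pickle.\n\"\"\"\nfrom mhcflurry.cluster_parallelism import cluster_results\n\n\ndef square(x, constant_data=None):\n    offset = (constant_data or {}).get(\"offset\", 0)\n    return x * x + offset\n\n\nif __name__ == \"__main__\":\n    # submit_command defaults to \"sh\" (run each job in a local shell);\n    # on LSF one would pass submit_command=\"bsub\", per the docstring.\n    results = cluster_results(\n        work_function=square,\n        work_items=[{\"x\": 1}, {\"x\": 2}, {\"x\": 3}],\n        constant_data={\"offset\": 10},\n        input_serialization_method=\"dill\",\n    )\n    print(list(results))  # order is preserved: [11, 14, 19]\n\n    # To make workers use the current interpreter, set the environment\n    # variable MHCFLURRY_CLUSTER_WORKER_COMMAND to\n    # \"python -m mhcflurry.cluster_worker_entry_point\" before launching.\n"
  },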
  {
    "path": "mhcflurry/cluster_worker_entry_point.py",
    "content": "\"\"\"\nModule entry point for cluster workers to ensure the current interpreter is used.\n\"\"\"\n\nfrom .cluster_parallelism import worker_entry_point\n\n\nif __name__ == \"__main__\":\n    worker_entry_point()\n"
  },
  {
    "path": "mhcflurry/common.py",
    "content": "import collections\nimport logging\nimport sys\nimport os\nimport json\nimport warnings\n\nimport numpy\nimport pandas\nfrom mhcgnomes import parse, Allele, AlleleWithoutGene, Gene\n\n\nfrom . import amino_acid\n\n\ndef normalize_allele_name(\n        raw_name,\n        forbidden_substrings=(\"MIC\", \"HFE\"),\n        raise_on_error=True,\n        default_value=None,\n        use_allele_aliases=True):\n    \"\"\"\n    Parses a string into a normalized allele representation.\n\n    Parameters\n    ----------\n    raw_name : str\n        Input string to normalize\n\n    forbidden_substrings : tuple of str\n        Fail on inputs which contain any of these strings\n\n    raise_on_error : bool\n        If an allele fails to parse raise an exception if this argument is True\n\n    default_value : str or None\n        If raise_on_error is False and allele fails to parse, return this value\n\n    use_allele_aliases : bool\n        If True, use mhcgnomes allele alias table (IMGT historical name\n        reassignments). Some old allele names (e.g. B*44:01, Cw*0201) were\n        retired by IMGT when the original sequences were found to contain\n        errors. Defaults to False to preserve current IMGT nomenclature;\n        the pseudosequence loading code explicitly handles aliases with\n        fallback logic.\n\n    Returns\n    -------\n    str or None\n    \"\"\"\n    for forbidden_substring in forbidden_substrings:\n        if forbidden_substring in raw_name:\n            if raise_on_error:\n                raise ValueError(\"Unsupported gene in MHC allele name: %s\" % raw_name)\n            else:\n                return default_value\n    result = parse(\n        raw_name,\n        only_class1=True,\n        required_result_types=[Allele, AlleleWithoutGene, Gene],\n        preferred_result_types=[Allele],\n        use_allele_aliases=use_allele_aliases,\n        infer_class2_pairing=False,\n        collapse_singleton_haplotypes=True,\n        collapse_singleton_serotypes=True,\n        raise_on_error=False,\n    )\n    if result is None:\n        if raise_on_error:\n            raise ValueError(\"Invalid MHC allele name: %s\" % raw_name)\n        else:\n            return default_value\n    if (\n        result.annotation_pseudogene\n        or result.annotation_null\n        or result.annotation_questionable\n    ):\n        if raise_on_error:\n            raise ValueError(\"Unsupported annotation on MHC allele: %s\" % raw_name)\n        else:\n            return default_value\n    return result.restrict_allele_fields(2).to_string()\n\n\n_pytorch_backend = \"auto\"\n_PYTORCH_BACKEND_ALIASES = {\n    \"default\": \"auto\",\n}\n_TENSORFLOW_BACKEND_ALIASES = {\n    \"tensorflow\": \"auto\",\n    \"tensorflow-default\": \"auto\",\n    \"tensorflow-gpu\": \"gpu\",\n    \"tensorflow-cpu\": \"cpu\",\n}\n_VALID_PYTORCH_BACKENDS = (\"auto\", \"gpu\", \"mps\", \"cpu\")\n\n\ndef normalize_pytorch_backend(backend):\n    \"\"\"\n    Normalize a requested backend name and validate it.\n\n    Parameters\n    ----------\n    backend : str or None\n\n    Returns\n    -------\n    str or None\n    \"\"\"\n    if backend is None:\n        return None\n    backend = _PYTORCH_BACKEND_ALIASES.get(backend, backend)\n    if backend not in _VALID_PYTORCH_BACKENDS:\n        raise ValueError(\n            \"Invalid backend %r. 
Expected one of: %s\" % (\n                backend,\n                \", \".join(_VALID_PYTORCH_BACKENDS),\n            )\n        )\n    return backend\n\n\ndef configure_pytorch(backend=None, gpu_device_nums=None, num_threads=None):\n    \"\"\"\n    Configure PyTorch device backend and threading.\n\n    Can be called multiple times. Each call updates the settings provided.\n\n    Parameters\n    ----------\n    backend : str, optional\n        Device backend: \"auto\", \"gpu\", \"mps\", or \"cpu\".\n        \"auto\" selects the best available device (GPU > MPS > CPU).\n    gpu_device_nums : list of int, optional\n        CUDA devices to expose via CUDA_VISIBLE_DEVICES. An empty list hides\n        CUDA entirely for the current process.\n    num_threads : int, optional\n        Number of threads for PyTorch operations\n    \"\"\"\n    import torch\n\n    global _pytorch_backend\n\n    if backend is not None:\n        _pytorch_backend = normalize_pytorch_backend(backend)\n\n    if gpu_device_nums is not None:\n        os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(map(str, gpu_device_nums))\n\n    if num_threads:\n        torch.set_num_threads(num_threads)\n\n\ndef configure_tensorflow(backend=None, gpu_device_nums=None, num_threads=None):\n    \"\"\"\n    Backward-compatible configuration entry point from the TF backend era.\n\n    Parameters\n    ----------\n    backend : str, optional\n        Legacy backend value retained for API compatibility. TensorFlow-era\n        names such as \"tensorflow-cpu\" are translated to the equivalent\n        PyTorch backend and emit a deprecation warning.\n    gpu_device_nums : list of int, optional\n        GPU devices to potentially use.\n    num_threads : int, optional\n        Number of threads for backend operations.\n    \"\"\"\n    translated_backend = None\n    if backend is not None:\n        translated_backend = _TENSORFLOW_BACKEND_ALIASES.get(backend)\n        if translated_backend is not None:\n            warnings.warn(\n                (\n                    \"configure_tensorflow(backend=%r) is deprecated; \"\n                    \"using PyTorch backend=%r. 
Use configure_pytorch() instead.\"\n                ) % (backend, translated_backend),\n                FutureWarning,\n                stacklevel=2,\n            )\n        else:\n            translated_backend = normalize_pytorch_backend(backend)\n    configure_pytorch(\n        backend=translated_backend,\n        gpu_device_nums=gpu_device_nums,\n        num_threads=num_threads,\n    )\n\n\ndef get_pytorch_device():\n    \"\"\"\n    Get the PyTorch device based on the backend set by ``configure_pytorch``.\n\n    Returns\n    -------\n    torch.device\n    \"\"\"\n    import torch\n\n    backend = _pytorch_backend\n\n    if backend == \"gpu\":\n        if not torch.cuda.is_available():\n            raise RuntimeError(\n                \"Backend 'gpu' requested but CUDA is not available\")\n        return torch.device('cuda')\n    elif backend == \"mps\":\n        if not (hasattr(torch.backends, 'mps') and\n                torch.backends.mps.is_available()):\n            raise RuntimeError(\n                \"Backend 'mps' requested but MPS is not available\")\n        return torch.device('mps')\n    elif backend == \"cpu\":\n        return torch.device('cpu')\n    else:\n        # auto: GPU > MPS > CPU\n        if torch.cuda.is_available():\n            return torch.device('cuda')\n        elif (hasattr(torch.backends, 'mps') and\n              torch.backends.mps.is_available()):\n            return torch.device('mps')\n        else:\n            return torch.device('cpu')\n\n\ndef configure_logging(verbose=False):\n    \"\"\"\n    Configure logging module using defaults.\n\n    Parameters\n    ----------\n    verbose : boolean\n        If true, output will be at level DEBUG, otherwise, INFO.\n    \"\"\"\n    level = logging.DEBUG if verbose else logging.INFO\n    logging.basicConfig(\n        format=\"%(asctime)s.%(msecs)d %(levelname)s %(module)s - %(funcName)s:\"\n        \" %(message)s\",\n        datefmt=\"%Y-%m-%d %H:%M:%S\",\n        stream=sys.stderr,\n        level=level,\n    )\n\n\n\ndef amino_acid_distribution(peptides, smoothing=0.0):\n    \"\"\"\n    Compute the fraction of each amino acid across a collection of peptides.\n\n    Parameters\n    ----------\n    peptides : list of string\n    smoothing : float, optional\n        Small number (e.g. 0.01) to add to all amino acid fractions. The higher\n        the number the more uniform the distribution.\n\n    Returns\n    -------\n    pandas.Series indexed by amino acids\n    \"\"\"\n    peptides = pandas.Series(peptides)\n    aa_counts = pandas.Series(peptides.map(collections.Counter).sum())\n    normalized = aa_counts / aa_counts.sum()\n    if smoothing:\n        normalized += smoothing\n        normalized /= normalized.sum()\n    return normalized\n\n\ndef random_peptides(num, length=9, distribution=None):\n    \"\"\"\n    Generate random peptides (kmers).\n\n    Parameters\n    ----------\n    num : int\n        Number of peptides to return\n\n    length : int\n        Length of each peptide\n\n    distribution : pandas.Series\n        Maps 1-letter amino acid abbreviations to\n        probabilities. 
If not specified a uniform\n        distribution is used.\n\n    Returns\n    -------\n    list of string\n\n    \"\"\"\n    if num == 0:\n        return []\n    if distribution is None:\n        distribution = pandas.Series(1, index=sorted(amino_acid.COMMON_AMINO_ACIDS))\n        distribution /= distribution.sum()\n\n    return [\n        \"\".join(peptide_sequence)\n        for peptide_sequence in numpy.random.choice(\n            distribution.index, p=distribution.values, size=(int(num), int(length))\n        )\n    ]\n\n\ndef positional_frequency_matrix(peptides):\n    \"\"\"\n    Given a set of peptides, calculate a length x amino acids frequency matrix.\n\n    Parameters\n    ----------\n    peptides : list of string\n        All of same length\n\n    Returns\n    -------\n    pandas.DataFrame\n        Index is position, columns are amino acids\n    \"\"\"\n    length = len(peptides[0])\n    assert all(len(peptide) == length for peptide in peptides)\n    counts = pandas.DataFrame(\n        index=[a for a in amino_acid.BLOSUM62_MATRIX.index if a != \"X\"],\n        columns=numpy.arange(1, length + 1),\n    )\n    for i in range(length):\n        counts[i + 1] = pandas.Series([p[i] for p in peptides]).value_counts()\n    result = (counts / len(peptides)).fillna(0.0).T\n    result.index.name = \"position\"\n    return result\n\n\ndef save_weights(weights_list, filename):\n    \"\"\"\n    Save model weights to the given filename using numpy's \".npz\" format.\n\n    Parameters\n    ----------\n    weights_list : list of numpy array\n\n    filename : string\n    \"\"\"\n    numpy.savez(\n        filename, **dict(((\"array_%d\" % i), w) for (i, w) in enumerate(weights_list))\n    )\n\n\ndef load_weights(filename):\n    \"\"\"\n    Restore model weights from the given filename, which should have been\n    created with `save_weights`.\n\n    Parameters\n    ----------\n    filename : string\n\n    Returns\n    -------\n    list of array\n    \"\"\"\n    with numpy.load(filename) as loaded:\n        weights = [loaded[\"array_%d\" % i] for i in range(len(loaded.keys()))]\n    return weights\n\n\nclass NumpyJSONEncoder(json.JSONEncoder):\n    \"\"\"\n    JSON encoder (used with json module) that can handle numpy arrays.\n    \"\"\"\n\n    def default(self, obj):\n        if isinstance(\n            obj,\n            (\n                numpy.int_,\n                numpy.intc,\n                numpy.intp,\n                numpy.int8,\n                numpy.int16,\n                numpy.int32,\n                numpy.int64,\n                numpy.uint8,\n                numpy.uint16,\n                numpy.uint32,\n                numpy.uint64,\n            ),\n        ):\n            return int(obj)\n        # numpy.float_ (removed in NumPy 2.0) is an alias of float64, so\n        # checking float64 covers it on all NumPy versions.\n        elif isinstance(\n            obj, (numpy.float16, numpy.float32, numpy.float64)\n        ):\n            return float(obj)\n        if isinstance(obj, numpy.ndarray):\n            return obj.tolist()\n        return json.JSONEncoder.default(self, obj)\n"
  },
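  {
    "path": "examples/common_utilities_sketch.py",
    "content": "\"\"\"\nHypothetical sketch, not part of the original package, of a few helpers\nin mhcflurry/common.py. The values shown in comments follow from the\ndocstrings and are expectations, not verified output.\n\"\"\"\nfrom mhcflurry.common import (\n    configure_pytorch,\n    get_pytorch_device,\n    normalize_allele_name,\n    random_peptides,\n)\n\n# Allele names are normalized to a two-field representation.\nprint(normalize_allele_name(\"HLA-A*02:01\"))  # expected: HLA-A*02:01\n\n# With raise_on_error=False, unparseable names return default_value\n# instead of raising ValueError.\nprint(normalize_allele_name(\n    \"not-an-allele\", raise_on_error=False, default_value=None))  # None\n\n# Force the CPU backend; get_pytorch_device() then returns\n# torch.device(\"cpu\") regardless of available accelerators.\nconfigure_pytorch(backend=\"cpu\", num_threads=1)\nprint(get_pytorch_device())\n\n# Five random 9-mers drawn uniformly over the common amino acids.\nprint(random_peptides(5, length=9))\n"
  },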
  {
    "path": "mhcflurry/custom_loss.py",
    "content": "\"\"\"\nCustom loss functions.\n\nFor losses supporting inequalities, each training data point is associated with\none of (=), (<), or (>). For e.g. (>) inequalities, penalization is applied only\nif the prediction is less than the given value.\n\nThis module now delegates to pytorch_losses.py for the actual loss implementations.\n\"\"\"\nimport numpy\n\n# Import PyTorch implementations\nfrom .pytorch_losses import (\n    MSEWithInequalities as PyTorchMSEWithInequalities,\n    MSEWithInequalitiesAndMultipleOutputs as PyTorchMSEWithInequalitiesAndMultipleOutputs,\n    MultiallelicMassSpecLoss as PyTorchMultiallelicMassSpecLoss,\n    StandardLoss as PyTorchStandardLoss,\n)\n\nCUSTOM_LOSSES = {}\n\n\ndef get_loss(name):\n    \"\"\"\n    Get a custom_loss.Loss instance by name.\n\n    Parameters\n    ----------\n    name : string\n\n    Returns\n    -------\n    custom_loss.Loss\n    \"\"\"\n    if name.startswith(\"custom:\"):\n        try:\n            custom_loss = CUSTOM_LOSSES[name.replace(\"custom:\", \"\")]\n        except KeyError:\n            raise ValueError(\n                \"No such custom loss: %s. Supported losses are: %s\" % (\n                    name,\n                    \", \".join([\n                        \"custom:\" + loss_name for loss_name in CUSTOM_LOSSES\n                    ])))\n        return custom_loss\n    return StandardKerasLoss(name)\n\n\nclass Loss(object):\n    \"\"\"\n    Thin wrapper to keep track of neural network loss functions, which could\n    be custom or baked into PyTorch.\n\n    Each subclass or instance should define these properties/methods:\n    - name : string\n    - loss : callable\n        This is the PyTorch loss function\n    - encode_y : numpy.ndarray -> numpy.ndarray\n        Transformation to apply to regression target before fitting\n    \"\"\"\n    def __init__(self, name=None):\n        self.name = name if name else self.name  # use name from class instance\n\n    def __str__(self):\n        return \"<Loss: %s>\" % self.name\n\n    def loss(self, y_true, y_pred):\n        raise NotImplementedError()\n\n    def get_keras_loss(self, reduction=\"sum_over_batch_size\"):\n        \"\"\"\n        Backward-compatible accessor from the TF/Keras backend era.\n\n        Parameters\n        ----------\n        reduction : string\n            Ignored. 
Kept for API compatibility.\n        \"\"\"\n        del reduction  # unused legacy argument\n        return self.loss\n\n\nclass StandardKerasLoss(Loss):\n    \"\"\"\n    A standard loss function such as MSE.\n    \"\"\"\n    supports_inequalities = False\n    supports_multiple_outputs = False\n\n    def __init__(self, loss_name=\"mse\"):\n        self._pytorch_loss = PyTorchStandardLoss(loss_name)\n        self.loss = loss_name\n        Loss.__init__(self, loss_name)\n\n    @staticmethod\n    def encode_y(y):\n        return numpy.array(y, dtype=numpy.float32)\n\n\nclass TransformPredictionsLossWrapper(Loss):\n    \"\"\"\n    Wrapper that applies an arbitrary transform to y_pred before calling an\n    underlying loss function.\n\n    The y_pred_transform function should be a tensor -> tensor function.\n    \"\"\"\n    def __init__(\n            self,\n            loss,\n            y_pred_transform=None):\n        self.wrapped_loss = loss\n        self.name = \"transformed_%s\" % loss.name\n        self.y_pred_transform = y_pred_transform\n        self.supports_inequalities = loss.supports_inequalities\n        self.supports_multiple_outputs = loss.supports_multiple_outputs\n\n    def encode_y(self, *args, **kwargs):\n        return self.wrapped_loss.encode_y(*args, **kwargs)\n\n    def loss(self, y_true, y_pred):\n        y_pred_transformed = self.y_pred_transform(y_pred)\n        return self.wrapped_loss.loss(y_true, y_pred_transformed)\n\n\nclass MSEWithInequalities(Loss):\n    \"\"\"\n    Supports training a regression model on data that includes inequalities\n    (e.g. x < 100). Mean square error is used as the loss for elements with\n    an (=) inequality. For elements with e.g. a (> 0.5) inequality, the loss\n    for that element is (y - 0.5)^2 (standard MSE) if y < 0.5 and 0 otherwise.\n\n    This loss assumes that the normal range for y_true and y_pred is 0 - 1. As a\n    hack, the implementation uses other intervals for y_true to encode the\n    inequality information.\n\n    y_true is interpreted as follows:\n\n    between 0 - 1:\n       Regular MSE loss is used. Penalty (y_pred - y_true)**2 is applied if\n       y_pred is greater or less than y_true.\n\n    between 2 - 3:\n       Treated as a \">\" inequality. Penalty (y_pred - (y_true - 2))**2 is\n       applied only if y_pred is less than y_true - 2.\n\n    between 4 - 5:\n       Treated as a \"<\" inequality. 
Penalty (y_pred - (y_true - 4))**2 is\n       applied only if y_pred is greater than y_true - 4.\n    \"\"\"\n    name = \"mse_with_inequalities\"\n    supports_inequalities = True\n    supports_multiple_outputs = False\n\n    def __init__(self):\n        self._pytorch_loss = PyTorchMSEWithInequalities()\n\n    @staticmethod\n    def encode_y(y, inequalities=None):\n        return PyTorchMSEWithInequalities.encode_y(y, inequalities)\n\n    @staticmethod\n    def _max_value(values):\n        if hasattr(values, \"detach\"):\n            return float(values.detach().max().item())\n        return float(numpy.asarray(values).max())\n\n    def loss(self, y_true, y_pred):\n        # Support both historical Keras-style (y_true, y_pred) and current\n        # PyTorch-style (y_pred, y_true) calling conventions.\n        if self._max_value(y_true) <= 1.5 and self._max_value(y_pred) > 1.5:\n            y_true, y_pred = y_pred, y_true\n        return self._pytorch_loss(y_pred, y_true)\n\n\nclass MSEWithInequalitiesAndMultipleOutputs(Loss):\n    \"\"\"\n    Loss supporting inequalities and multiple outputs.\n\n    This loss assumes that the normal range for y_true and y_pred is 0 - 1. As a\n    hack, the implementation uses other intervals for y_true to encode the\n    inequality and output-index information.\n\n    Inequalities are encoded into the regression target as in\n    the MSEWithInequalities loss.\n\n    Multiple outputs are encoded by mapping each regression target x (after\n    transforming for inequalities) using the rule x -> x + i * 10 where i is\n    the output index.\n\n    The reason for explicitly encoding multiple outputs this way (rather than\n    just making the regression target a matrix instead of a vector) is that\n    in our use cases we frequently have missing data in the regression target.\n    This encoding gives a simple way to penalize only on (data point, output\n    index) pairs that have labels.\n    \"\"\"\n    name = \"mse_with_inequalities_and_multiple_outputs\"\n    supports_inequalities = True\n    supports_multiple_outputs = True\n\n    def __init__(self):\n        self._pytorch_loss = PyTorchMSEWithInequalitiesAndMultipleOutputs()\n\n    @staticmethod\n    def encode_y(y, inequalities=None, output_indices=None):\n        return PyTorchMSEWithInequalitiesAndMultipleOutputs.encode_y(\n            y, inequalities, output_indices\n        )\n\n    def loss(self, y_true, y_pred):\n        # Support both historical Keras-style (y_true, y_pred) and current\n        # PyTorch-style (y_pred, y_true) calling conventions.\n        if (\n                getattr(y_true, \"ndim\", None) == 2 and\n                getattr(y_pred, \"ndim\", None) == 2 and\n                y_true.shape[1] > 1 and\n                y_pred.shape[1] == 1):\n            y_true, y_pred = y_pred, y_true\n        else:\n            max_true = MSEWithInequalities._max_value(y_true)\n            max_pred = MSEWithInequalities._max_value(y_pred)\n            if max_true <= 1.5 and max_pred > 1.5:\n                y_true, y_pred = y_pred, y_true\n        return self._pytorch_loss(y_pred, y_true)\n\n\nclass MultiallelicMassSpecLoss(Loss):\n    \"\"\"\n    Multiallelic mass spec loss function.\n    \"\"\"\n    name = \"multiallelic_mass_spec_loss\"\n    supports_inequalities = True\n    supports_multiple_outputs = False\n\n    def __init__(self, delta=0.2, multiplier=1.0):\n        self.delta = delta\n        self.multiplier = multiplier\n        self._pytorch_loss = PyTorchMultiallelicMassSpecLoss(delta, 
multiplier)\n\n    @staticmethod\n    def encode_y(y):\n        return PyTorchMultiallelicMassSpecLoss.encode_y(y)\n\n    def loss(self, y_true, y_pred):\n        # Support both historical Keras-style (y_true, y_pred) and current\n        # PyTorch-style (y_pred, y_true) calling conventions.\n        if getattr(y_true, \"ndim\", None) == 2 and y_true.shape[1] > 1:\n            y_true, y_pred = y_pred, y_true\n        return self._pytorch_loss(y_pred, y_true)\n\n\ndef check_shape(name, arr, expected_shape):\n    \"\"\"\n    Raise ValueError if arr.shape != expected_shape.\n\n    Parameters\n    ----------\n    name : string\n        Included in error message to aid debugging\n    arr : numpy.ndarray\n    expected_shape : tuple of int\n    \"\"\"\n    if arr.shape != expected_shape:\n        raise ValueError(\"Expected %s to have shape %s not %s\" % (\n            name, str(expected_shape), str(arr.shape)))\n\n\n# Register custom losses.\nfor cls in [\n        MSEWithInequalities,\n        MSEWithInequalitiesAndMultipleOutputs,\n        MultiallelicMassSpecLoss]:\n    CUSTOM_LOSSES[cls.name] = cls()\n"
  },
  {
    "path": "mhcflurry/data_dependent_weights_initialization.py",
    "content": "\"\"\"\nLayer-sequential unit-variance initialization for neural networks.\n\nSee:\n    Mishkin and Matas, \"All you need is a good init\". 2016.\n    https://arxiv.org/abs/1511.06422\n\"\"\"\n#\n# LSUV initialization code in this file is adapted from:\n#   https://github.com/ducha-aiki/LSUV-keras/blob/master/lsuv_init.py\n# by Dmytro Mishkin\n#\n# Here is the license for the original code:\n#\n#\n# Copyright (C) 2017, Dmytro Mishkin\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n# 1. Redistributions of source code must retain the above copyright\n#    notice, this list of conditions and the following disclaimer.\n# 2. Redistributions in binary form must reproduce the above copyright\n#    notice, this list of conditions and the following disclaimer in the\n#    documentation and/or other materials provided with the\n#    distribution.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\nimport numpy\nimport torch\nimport torch.nn as nn\n\n\ndef svd_orthonormal(shape):\n    \"\"\"\n    Generate an orthonormal matrix using SVD.\n\n    Parameters\n    ----------\n    shape : tuple\n        Shape of the weight matrix (must have at least 2 dimensions)\n\n    Returns\n    -------\n    numpy.ndarray\n        Orthonormal matrix of the given shape\n    \"\"\"\n    # Orthonormal init code is from Lasagne\n    # https://github.com/Lasagne/Lasagne/blob/master/lasagne/init.py\n    if len(shape) < 2:\n        raise RuntimeError(\"Only shapes of length 2 or more are supported.\")\n    flat_shape = (shape[0], numpy.prod(shape[1:]))\n    a = numpy.random.standard_normal(flat_shape).astype(\"float32\")\n    u, _, v = numpy.linalg.svd(a, full_matrices=False)\n    q = u if u.shape == flat_shape else v\n    q = q.reshape(shape)\n    return q\n\n\ndef get_activations_pytorch(model, layer_name, x_dict, device=None):\n    \"\"\"\n    Get activations from a specific layer in a PyTorch model.\n\n    Parameters\n    ----------\n    model : nn.Module\n        PyTorch model\n    layer_name : str\n        Name of the layer to get activations from\n    x_dict : dict\n        Input dictionary with tensors\n    device : torch.device, optional\n        Device to run on\n\n    Returns\n    -------\n    numpy.ndarray\n        Activations from the specified layer\n    \"\"\"\n    if device is None:\n        device = next(model.parameters()).device\n\n    activations = {}\n\n    def hook_fn(module, input, output):\n        activations['output'] = output.detach().cpu().numpy()\n\n    # Find the layer by name\n    target_layer = None\n    for name, module in model.named_modules():\n        if name == layer_name:\n            target_layer = module\n     
       break\n\n    if target_layer is None:\n        raise ValueError(f\"Layer '{layer_name}' not found in model\")\n\n    # Register hook\n    handle = target_layer.register_forward_hook(hook_fn)\n\n    # Forward pass\n    model.eval()\n    with torch.no_grad():\n        # Convert inputs to tensors\n        inputs = {}\n        for key, value in x_dict.items():\n            if isinstance(value, numpy.ndarray):\n                inputs[key] = torch.from_numpy(value).to(device)\n            else:\n                inputs[key] = value.to(device)\n\n        # Run forward pass\n        _ = model(inputs)\n\n    # Remove hook\n    handle.remove()\n\n    return activations['output']\n\n\ndef get_activations(model, layer, X_batch):\n    \"\"\"\n    Backward-compatible activation helper from the TF backend era.\n\n    Parameters\n    ----------\n    model : nn.Module\n    layer : str or object with ``name`` attribute\n    X_batch : dict\n        Network input dictionary.\n    \"\"\"\n    layer_name = layer if isinstance(layer, str) else getattr(layer, \"name\", None)\n    if layer_name is None:\n        raise ValueError(\"Layer must be a layer name or an object with a name attribute.\")\n    if not isinstance(X_batch, dict):\n        raise ValueError(\"X_batch must be a dict of model inputs.\")\n    return get_activations_pytorch(model, layer_name, X_batch)\n\n\ndef lsuv_init(model, batch, verbose=True, margin=0.1, max_iter=100):\n    \"\"\"\n    Initialize neural network weights using layer-sequential unit-variance\n    initialization.\n\n    See:\n        Mishkin and Matas, \"All you need is a good init\". 2016.\n        https://arxiv.org/abs/1511.06422\n\n    Parameters\n    ----------\n    model : nn.Module\n        PyTorch model\n    batch : dict\n        Training data batch (dict of numpy arrays or tensors)\n    verbose : boolean\n        Whether to print progress to stdout\n    margin : float\n        Acceptable variance margin\n    max_iter : int\n        Maximum iterations per layer\n\n    Returns\n    -------\n    nn.Module\n        Same model, modified in-place\n    \"\"\"\n    needed_variance = 1.0\n    layers_initialized = 0\n\n    device = next(model.parameters()).device\n\n    # Get list of layers to initialize (Dense/Linear and Conv layers)\n    layers_to_init = []\n    for name, module in model.named_modules():\n        if isinstance(module, (nn.Linear, nn.Conv1d, nn.Conv2d)):\n            layers_to_init.append((name, module))\n\n    for layer_name, layer in layers_to_init:\n        # Get output shape\n        try:\n            activations = get_activations_pytorch(model, layer_name, batch, device)\n            output_size = numpy.prod(activations.shape[1:])\n        except Exception as e:\n            if verbose:\n                print(f'LSUV initialization skipping {layer_name}: {e}')\n            continue\n\n        # Skip small layers\n        if output_size < 32:\n            if verbose:\n                print(f'LSUV initialization skipping {layer_name} (output size {output_size} < 32)')\n            continue\n\n        layers_initialized += 1\n\n        # Apply orthonormal initialization to weights\n        with torch.no_grad():\n            weight = layer.weight.data.cpu().numpy()\n            ortho_weight = svd_orthonormal(weight.shape)\n            layer.weight.data = torch.from_numpy(ortho_weight).to(device)\n\n        # Get activations and compute variance\n        activations = get_activations_pytorch(model, layer_name, batch, device)\n        variance = 
numpy.var(activations)\n\n        iteration = 0\n        if verbose:\n            print(layer_name, variance)\n\n        while abs(needed_variance - variance) > margin:\n            if verbose:\n                print(\n                    'LSUV initialization',\n                    layer_name,\n                    iteration,\n                    needed_variance,\n                    margin,\n                    variance)\n\n            if numpy.abs(numpy.sqrt(variance)) < 1e-7:\n                break  # avoid zero division\n\n            # Scale weights to achieve unit variance\n            with torch.no_grad():\n                scale_factor = numpy.sqrt(needed_variance) / numpy.sqrt(variance)\n                layer.weight.data *= scale_factor\n\n            # Recompute activations and variance\n            activations = get_activations_pytorch(model, layer_name, batch, device)\n            variance = numpy.var(activations)\n\n            iteration += 1\n            if iteration >= max_iter:\n                break\n\n    if verbose:\n        print('Done with LSUV: total layers initialized', layers_initialized)\n    return model\n"
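  },
  {
    "path": "examples/lsuv_init_sketch.py",
    "content": "\"\"\"\nMinimal, hypothetical sketch of running lsuv_init (from\nmhcflurry/data_dependent_weights_initialization.py) on a toy model.\nThe TwoLayer module and the 'peptide' input key are invented for\nillustration; real mhcflurry networks take richer input dicts.\n\"\"\"\nimport numpy\nimport torch\nimport torch.nn as nn\n\nfrom mhcflurry.data_dependent_weights_initialization import lsuv_init\n\n\nclass TwoLayer(nn.Module):\n    # Toy network whose forward accepts a dict of inputs, matching the\n    # calling convention that get_activations_pytorch expects.\n    def __init__(self):\n        super().__init__()\n        self.dense1 = nn.Linear(64, 64)\n        self.dense2 = nn.Linear(64, 64)\n\n    def forward(self, inputs):\n        hidden = torch.relu(self.dense1(inputs['peptide']))\n        return self.dense2(hidden)\n\n\nmodel = TwoLayer()\nbatch = {'peptide': numpy.random.rand(128, 64).astype('float32')}\n\n# Orthonormalizes each Linear layer's weights, then rescales them until\n# the layer's activations have roughly unit variance on this batch.\nlsuv_init(model, batch, verbose=True)\n"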
  },
  {
    "path": "mhcflurry/downloads.py",
    "content": "\"\"\"\nManage local downloaded data.\n\"\"\"\n\nimport logging\nimport yaml\nfrom os.path import join, exists\nfrom os import environ\nfrom shlex import quote\nfrom importlib.resources import files\nfrom collections import OrderedDict\nfrom appdirs import user_data_dir\n\nimport pandas\n\nENVIRONMENT_VARIABLES = [\n    \"MHCFLURRY_DATA_DIR\",\n    \"MHCFLURRY_DOWNLOADS_CURRENT_RELEASE\",\n    \"MHCFLURRY_DOWNLOADS_DIR\",\n    \"MHCFLURRY_DEFAULT_CLASS1_MODELS\"\n]\n\n_DOWNLOADS_DIR = None\n_CURRENT_RELEASE = None\n_METADATA = None\n_MHCFLURRY_DEFAULT_CLASS1_MODELS_DIR = environ.get(\n    \"MHCFLURRY_DEFAULT_CLASS1_MODELS\")\n_MHCFLURRY_DEFAULT_CLASS1_PRESENTATION_MODELS_DIR = environ.get(\n    \"MHCFLURRY_DEFAULT_CLASS1_PRESENTATION_MODELS_DIR\")\n_MHCFLURRY_DEFAULT_CLASS1_PROCESSING_MODELS_DIR = environ.get(\n    \"MHCFLURRY_DEFAULT_CLASS1_PROCESSING_MODELS_DIR\")\n\n\ndef get_downloads_dir():\n    \"\"\"\n    Return the path to local downloaded data\n    \"\"\"\n    return _DOWNLOADS_DIR\n\n\ndef get_current_release():\n    \"\"\"\n    Return the current downloaded data release\n    \"\"\"\n    return _CURRENT_RELEASE\n\n\ndef get_downloads_metadata():\n    \"\"\"\n    Return the contents of downloads.yml as a dict\n    \"\"\"\n    global _METADATA\n    if _METADATA is None:\n        _METADATA = yaml.safe_load(\n            files(\"mhcflurry\").joinpath(\"downloads.yml\").read_text()\n        )\n    return _METADATA\n\n\ndef get_default_class1_models_dir(test_exists=True):\n    \"\"\"\n    Return the absolute path to the default class1 models dir.\n\n    If environment variable MHCFLURRY_DEFAULT_CLASS1_MODELS is set to an\n    absolute path, return that path. If it's set to a relative path (i.e. does\n    not start with /) then return that path taken to be relative to the mhcflurry\n    downloads dir.\n\n    If environment variable MHCFLURRY_DEFAULT_CLASS1_MODELS is NOT set,\n    then return the path to downloaded models in the \"models_class1\" download.\n\n    Parameters\n    ----------\n\n    test_exists : boolean, optional\n        Whether to raise an exception of the path does not exist\n\n    Returns\n    -------\n    string : absolute path\n    \"\"\"\n    if _MHCFLURRY_DEFAULT_CLASS1_MODELS_DIR:\n        result = join(get_downloads_dir(), _MHCFLURRY_DEFAULT_CLASS1_MODELS_DIR)\n        if test_exists and not exists(result):\n            raise IOError(\"No such directory: %s\" % result)\n        return result\n    return get_path(\n        \"models_class1_pan\", \"models.combined\", test_exists=test_exists)\n\n\ndef get_default_class1_presentation_models_dir(test_exists=True):\n    \"\"\"\n    Return the absolute path to the default class1 presentation models dir.\n\n    See `get_default_class1_models_dir`.\n\n    If environment variable MHCFLURRY_DEFAULT_CLASS1_PRESENTATION_MODELS is set\n    to an absolute path, return that path. 
If it's set to a relative path (does\n    not start with /) then return that path taken to be relative to the mhcflurry\n    downloads dir.\n\n    Parameters\n    ----------\n\n    test_exists : boolean, optional\n        Whether to raise an exception of the path does not exist\n\n    Returns\n    -------\n    string : absolute path\n    \"\"\"\n    if _MHCFLURRY_DEFAULT_CLASS1_PRESENTATION_MODELS_DIR:\n        result = join(\n            get_downloads_dir(),\n            _MHCFLURRY_DEFAULT_CLASS1_PRESENTATION_MODELS_DIR)\n        if test_exists and not exists(result):\n            raise IOError(\"No such directory: %s\" % result)\n        return result\n    return get_path(\n        \"models_class1_presentation\", \"models\", test_exists=test_exists)\n\n\ndef get_default_class1_processing_models_dir(test_exists=True):\n    \"\"\"\n    Return the absolute path to the default class1 processing models dir.\n\n    See `get_default_class1_models_dir`.\n\n    If environment variable MHCFLURRY_DEFAULT_CLASS1_PROCESSING_MODELS is set\n    to an absolute path, return that path. If it's set to a relative path (does\n    not start with /) then return that path taken to be relative to the mhcflurry\n    downloads dir.\n\n    Parameters\n    ----------\n\n    test_exists : boolean, optional\n        Whether to raise an exception of the path does not exist\n\n    Returns\n    -------\n    string : absolute path\n    \"\"\"\n    if _MHCFLURRY_DEFAULT_CLASS1_PROCESSING_MODELS_DIR:\n        result = join(\n            get_downloads_dir(),\n            _MHCFLURRY_DEFAULT_CLASS1_PROCESSING_MODELS_DIR)\n        if test_exists and not exists(result):\n            raise IOError(\"No such directory: %s\" % result)\n        return result\n\n    # Default to the 'with flanks' model variant.\n    return get_path(\n        \"models_class1_processing\", \"models.selected.with_flanks\", test_exists=test_exists)\n\n\ndef get_current_release_downloads():\n    \"\"\"\n    Return a dict of all available downloads in the current release.\n\n    The dict keys are the names of the downloads. The values are a dict\n    with two entries:\n\n    downloaded : bool\n        Whether the download is currently available locally\n\n    metadata : dict\n        Info about the download from downloads.yml such as URL\n\n    up_to_date : bool or None\n        Whether the download URL(s) match what was used to download the current\n        data. 
This is None if it cannot be determined.\n    \"\"\"\n    downloads = (\n        get_downloads_metadata()\n        ['releases']\n        [get_current_release()]\n        ['downloads'])\n\n    def up_to_date(dir, urls):\n        try:\n            df = pandas.read_csv(join(dir, \"DOWNLOAD_INFO.csv\"))\n            return list(df.url) == list(urls)\n        except IOError:\n            return None\n\n    return OrderedDict(\n        (download[\"name\"], {\n            'downloaded': exists(join(get_downloads_dir(), download[\"name\"])),\n            'up_to_date': up_to_date(\n                join(get_downloads_dir(), download[\"name\"]),\n                [download['url']] if 'url' in download else download['part_urls']),\n            'metadata': download,\n        }) for download in downloads\n    )\n\n\ndef get_path(download_name, filename='', test_exists=True):\n    \"\"\"\n    Get the local path to a file in an MHCflurry download\n\n    Parameters\n    ----------\n    download_name : string\n\n    filename : string\n        Relative path within the download to the file of interest\n\n    test_exists : boolean\n        If True (default) throw an error telling the user how to download the\n        data if the file does not exist\n\n    Returns\n    -------\n    string giving local absolute path\n    \"\"\"\n    assert '/' not in download_name, \"Invalid download: %s\" % download_name\n    path = join(get_downloads_dir(), download_name, filename)\n    if test_exists and not exists(path):\n        raise RuntimeError(\n            \"Missing MHCflurry downloadable file: %s. \"\n            \"To download this data, run:\\n\\tmhcflurry-downloads fetch %s\\n\"\n            \"in a shell.\"\n            % (quote(path), download_name))\n    return path\n\n\ndef configure():\n    \"\"\"\n    Set up various global variables based on environment variables.\n    \"\"\"\n    global _DOWNLOADS_DIR\n    global _CURRENT_RELEASE\n\n    _CURRENT_RELEASE = None\n    _DOWNLOADS_DIR = environ.get(\"MHCFLURRY_DOWNLOADS_DIR\")\n    if not _DOWNLOADS_DIR:\n        metadata = get_downloads_metadata()\n        _CURRENT_RELEASE = environ.get(\"MHCFLURRY_DOWNLOADS_CURRENT_RELEASE\")\n        if not _CURRENT_RELEASE:\n            _CURRENT_RELEASE = metadata['current-release']\n\n        current_release_compatibility = (\n            metadata[\"releases\"][_CURRENT_RELEASE][\"compatibility-version\"])\n        current_compatibility = metadata[\"current-compatibility-version\"]\n        if current_release_compatibility != current_compatibility:\n            logging.warning(\n                \"The specified downloads are not compatible with this version \"\n                \"of the MHCflurry codebase. Downloads: release %s, \"\n                \"compatibility version: %d. Code compatibility version: %d\",\n                _CURRENT_RELEASE,\n                current_release_compatibility,\n                current_compatibility)\n\n        data_dir = environ.get(\"MHCFLURRY_DATA_DIR\")\n        if not data_dir:\n            # Increase the version every time we make a breaking change in\n            # how the data is organized. For changes to e.g. just model\n            # serialization, the downloads release numbers should be used.\n            data_dir = user_data_dir(\"mhcflurry\", version=\"4\")\n        _DOWNLOADS_DIR = join(data_dir, _CURRENT_RELEASE)\n\n    logging.debug(\"Configured MHCFLURRY_DOWNLOADS_DIR: %s\", _DOWNLOADS_DIR)\n\n\nconfigure()\n"
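  },
  {
    "path": "examples/downloads_api_sketch.py",
    "content": "\"\"\"\nMinimal, hypothetical sketch of the path-resolution API in\nmhcflurry/downloads.py. The 'models_class1_presentation' name comes from\ndownloads.yml; nothing here requires the download to have been fetched.\n\"\"\"\nfrom mhcflurry.downloads import (\n    get_current_release,\n    get_current_release_downloads,\n    get_path,\n)\n\nprint(\"Release:\", get_current_release())\n\n# Dict keyed by download name, each value holding 'downloaded',\n# 'up_to_date', and 'metadata' entries as built by\n# get_current_release_downloads().\nfor name, info in get_current_release_downloads().items():\n    print(name, \"downloaded=%s\" % info[\"downloaded\"])\n\n# Resolve a file inside a download. test_exists=False skips the\n# existence check, so this works before fetching; with the default\n# test_exists=True, a RuntimeError with fetch instructions is raised for\n# missing files.\nprint(get_path(\"models_class1_presentation\", \"models\", test_exists=False))\n"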
  },
  {
    "path": "mhcflurry/downloads.yml",
    "content": "# This file describes collections of data and trained model weights that are\n# released with MHCflurry. We refer to these datasets as \"downloads.\"\n#\n# Downloads are organized into \"releases\", which generally correspond with\n# MHCflurry releases of the same version.\n#\n# When users run \"mhcflurry-downloads fetch\" without specifying any downloads\n# by name, the downloads with \"default=true\" are downloaded.\n\n# This should usually be the latest release.\ncurrent-release: 2.2.0\n\n# An integer indicating what models the current MHCflurry code base is compatible\n# with. Increment this integer when changes are made to MHCflurry that would break\n# the use of previously released models.\ncurrent-compatibility-version: 2\n\n# Add new releases here as they are made.\nreleases:\n  2.2.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1_pan\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_pan.selected.20200610.tar.bz2\n        default: false\n\n      - name: models_class1_presentation\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_presentation.20200611.tar.bz2\n        default: true\n\n      - name: models_class1_processing\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_processing.selected.20200611.tar.bz2\n        default: false\n\n      - name: models_class1_pan_unselected\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_pan.20200610.tar.bz2\n        default: false\n\n      - name: models_class1_pan_variants\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_pan_variants.selected.20200610.tar.bz2\n        default: false\n\n      - name: data_evaluation\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.ac\n        default: false\n\n      - name: analysis_predictor_info\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/analysis_predictor_info.20200711.tar.bz2\n        default: false\n\n      - name: data_predictions\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ac\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ad\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ae\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.af\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ag\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ah\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ai\n          - 
https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.aj\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ak\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.al\n        default: false\n\n      - name: data_mass_spec_annotated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_annotated.20191226.tar.bz2\n        default: false\n\n      - name: data_references\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/data_references.20190927.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.1/data_iedb.20231019.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_systemhcatlas.20190506.tar.bz2\n        default: false\n\n      - name: allele_sequences\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.1/allele_sequences.20231025.tar.bz2\n        default: false\n\n      - name: random_peptide_predictions\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/random_peptide_predictions.20190506.tar.bz2\n        default: false\n\n      - name: data_published\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_published.20200501.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.1/data_curated.20231023.tar.bz2\n        default: true\n\n      # Older downloads\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: false\n\n      - name: models_class1_unselected\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n  2.0.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1_pan\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_pan.selected.20200610.tar.bz2\n        default: false\n\n      - name: models_class1_presentation\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_presentation.20200611.tar.bz2\n        default: true\n\n      - name: models_class1_processing\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_processing.selected.20200611.tar.bz2\n        default: false\n\n      - name: 
models_class1_pan_unselected\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_pan.20200610.tar.bz2\n        default: false\n\n      - name: models_class1_pan_variants\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/models_class1_pan_variants.selected.20200610.tar.bz2\n        default: false\n\n      - name: data_evaluation\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.ac\n        default: false\n\n      - name: analysis_predictor_info\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/analysis_predictor_info.20200711.tar.bz2\n        default: false\n\n      - name: data_predictions\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ac\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ad\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ae\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.af\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ag\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ah\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ai\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.aj\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.ak\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_predictions.20200713.tar.bz2.part.al\n        default: false\n\n      - name: data_mass_spec_annotated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_annotated.20191226.tar.bz2\n        default: false\n\n      - name: data_references\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/data_references.20190927.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_iedb.20200427.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_systemhcatlas.20190506.tar.bz2\n        default: false\n\n      - name: allele_sequences\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/allele_sequences.20191231.tar.bz2\n        default: false\n\n      - name: random_peptide_predictions\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/random_peptide_predictions.20190506.tar.bz2\n        default: false\n\n      - name: data_published\n        url: 
https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_published.20200501.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_curated.20200427.tar.bz2\n        default: true\n\n      # Older downloads\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: false\n\n      - name: models_class1_unselected\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n  1.7.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1_pan\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/models_class1_pan.selected.20200610.tar.bz2\n        default: false\n\n      - name: models_class1_presentation\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/models_class1_presentation.20200611.tar.bz2\n        default: true\n\n      - name: models_class1_processing\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/models_class1_processing.selected.20200611.tar.bz2\n        default: false\n\n      - name: models_class1_pan_unselected\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/models_class1_pan.20200610.tar.bz2\n        default: false\n\n      - name: models_class1_pan_variants\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/models_class1_pan_variants.selected.20200610.tar.bz2\n        default: false\n\n      - name: models_class1_processing_variants\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_processing_variants.selected.20200124.tar.bz2\n        default: false\n\n      - name: data_evaluation\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/pre-2.0/data_evaluation.20200611.tar.bz2.part.ac\n        default: false\n\n      - name: analysis_predictor_info\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-2.0/analysis_predictor_info.20200610.tar.bz2\n        default: false\n\n      - name: data_predictions\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.aa\n          - 
https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ac\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ad\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ae\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.af\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ag\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ah\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ai\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.aj\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ak\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.al\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.am\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.an\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_predictions.20200430.tar.bz2.part.ao\n        default: false\n\n      - name: data_mass_spec_annotated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_annotated.20191226.tar.bz2\n        default: false\n\n      - name: data_references\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/data_references.20190927.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_iedb.20200427.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_systemhcatlas.20190506.tar.bz2\n        default: false\n\n      - name: allele_sequences\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/allele_sequences.20191231.tar.bz2\n        default: false\n\n      - name: random_peptide_predictions\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/random_peptide_predictions.20190506.tar.bz2\n        default: false\n\n      - name: data_published\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_published.20200501.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.7.0/data_curated.20200427.tar.bz2\n        default: true\n\n      # Older downloads\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: false\n\n      - name: models_class1_unselected\n        url: 
http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n  1.6.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1_pan\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_pan.selected.20200204.tar.bz2\n        default: false\n\n      - name: models_class1_presentation\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_presentation.20200205.tar.bz2\n        default: true\n\n      - name: models_class1_processing\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_processing.selected.20200122.tar.bz2\n        default: false\n\n      - name: models_class1_pan_unselected\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_pan.20200104.tar.bz2\n        default: false\n\n      - name: models_class1_pan_variants\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_pan_variants.selected.20200307.tar.bz2\n        default: false\n\n      - name: models_class1_processing_variants\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/models_class1_processing_variants.selected.20200124.tar.bz2\n        default: false\n\n      - name: data_evaluation\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_evaluation.20200209.tar.bz2\n        default: false\n\n      - name: data_mass_spec_benchmark\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ac\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ad\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ae\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.af\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ag\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ah\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ai\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.aj\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ak\n          - 
https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.al\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.am\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.an\n          - https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_benchmark.20200105.tar.bz2.part.ao\n        default: false\n\n      - name: data_mass_spec_annotated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_mass_spec_annotated.20191226.tar.bz2\n        default: false\n\n      - name: data_references\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/data_references.20190927.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_iedb.20191220.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_systemhcatlas.20190506.tar.bz2\n        default: false\n\n      - name: allele_sequences\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/allele_sequences.20191231.tar.bz2\n        default: false\n\n      - name: random_peptide_predictions\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/random_peptide_predictions.20190506.tar.bz2\n        default: false\n\n      - name: data_published\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_published.20191220.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.6.0/data_curated.20200103.tar.bz2\n        default: true\n\n      # Older downloads\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: false\n\n      - name: models_class1_unselected\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n\n  1.5.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1_pan\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan.20191221.tar.bz2\n        default: false\n\n      - name: models_class1_pan_unselected\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan_unselected.20191221.tar.bz2.part.aa\n        default: false\n\n      - name: models_class1_pan_refined\n        url: 
https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan_refined.20191212c.tar.bz2\n        default: false\n\n      - name: models_class1_pan_variants\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan_variants.20191226.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan_variants.20191226.tar.bz2.part.ab\n        default: false\n\n      - name: data_mass_spec_benchmark\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ab\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ac\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ad\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ae\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.af\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ag\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ah\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ai\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.aj\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.ak\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.al\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.am\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_benchmark.20191225.tar.bz2.part.an\n        default: false\n\n      - name: data_mass_spec_annotated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_annotated.20191226.tar.bz2\n        default: false\n\n      - name: data_references\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/data_references.20190927.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_iedb.20191220.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_systemhcatlas.20190506.tar.bz2\n        default: false\n\n      - name: allele_sequences\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/allele_sequences.20190506.tar.bz2\n        default: false\n\n      - name: random_peptide_predictions\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/random_peptide_predictions.20190506.tar.bz2\n        default: false\n\n      - name: data_published\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_published.20191220.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: 
https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_curated.20191226.tar.bz2\n        default: true\n\n      # Older downloads\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: false\n\n      - name: models_class1_unselected\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n  1.4.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1_pan\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/models_class1_pan.20190928.tar.bz2\n        default: false\n\n      - name: models_class1_pan_unselected\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/models_class1_pan_unselected.20190924.tar.bz2.part.aa\n        default: false\n\n      - name: models_class1_pan_refined\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan_refined.20191212c.tar.bz2\n        default: false\n\n      - name: models_class1_pan_variants\n        part_urls:\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan_variants.20191101.tar.bz2.part.aa\n          - https://github.com/openvax/mhcflurry/releases/download/1.4.0/models_class1_pan_variants.20191101.tar.bz2.part.ab\n        default: false\n\n      - name: data_mass_spec_benchmark\n        url: https://www.dropbox.com/s/4wzotlnl58i1w32/data_mass_spec_benchmark.20191027.tar.bz2?dl=1\n        default: false\n\n      - name: data_mass_spec_annotated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_mass_spec_annotated.20191030.tar.bz2\n        default: false\n\n      - name: data_references\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/data_references.20190927.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: https://github.com/openvax/mhcflurry/releases/download/pre-1.4.0/data_iedb.20190916.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_systemhcatlas.20190506.tar.bz2\n        default: false\n\n      - name: allele_sequences\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/allele_sequences.20190506.tar.bz2\n        default: false\n\n      - name: random_peptide_predictions\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/random_peptide_predictions.20190506.tar.bz2\n        default: false\n\n      - name: data_published\n        url: 
https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_published.20191030.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: https://github.com/openvax/mhcflurry/releases/download/1.4.0/data_curated.20191030.tar.bz2\n        default: true\n\n      # Older downloads\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: false\n\n      - name: models_class1_unselected\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n\n  1.3.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1_pan\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/models_class1_pan.20190829.tar.bz2\n        default: false\n\n      - name: models_class1_pan_unselected\n        part_urls:\n          - http://github.com/openvax/mhcflurry/releases/download/pan-dev1/models_class1_pan_unselected.20190826.tar.bz2.part.aa\n          - http://github.com/openvax/mhcflurry/releases/download/pan-dev1/models_class1_pan_unselected.20190826.tar.bz2.part.ab\n        default: false\n\n      - name: data_iedb\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_iedb.20190610.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_systemhcatlas.20190506.tar.bz2\n        default: false\n\n      - name: allele_sequences\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/allele_sequences.20190506.tar.bz2\n        default: false\n\n      - name: random_peptide_predictions\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/random_peptide_predictions.20190506.tar.bz2\n        default: false\n\n      - name: data_published\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_published.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: http://github.com/openvax/mhcflurry/releases/download/pan-dev1/data_curated.20190516.tar.bz2\n        default: true\n\n      # Older downloads\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: false\n\n      - name: models_class1_unselected\n        url: 
http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n\n\n  1.2.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_selected_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_selected_no_mass_spec.20180225.tar.bz2\n        default: true\n\n      - name: models_class1_unselected\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_unselected.20180221.tar.bz2\n        default: false\n\n      - name: models_class1_trained_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_trained_with_mass_spec.20180228.tar.bz2\n        default: false\n\n      - name: models_class1_unselected_with_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2.1/models_class1_unselected_with_mass_spec.20180227.tar.bz2\n        default: false\n\n      - name: models_class1_minimal\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/models_class1_minimal.20180226.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.0/data_iedb.tar.bz2\n        default: false\n\n      - name: data_published\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/data_published.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/data_systemhcatlas.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.2/data_curated.20180219.tar.bz2\n        default: true\n\n  1.1.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/models_class1.20180205.tar.bz2\n        default: true\n\n      - name: models_class1_no_mass_spec\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/models_class1_no_mass_spec.20180205.tar.bz2\n        default: true\n\n      - name: models_class1_experiments1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/models_class1_experiments1.tar.bz2\n        default: false\n\n      - name: cross_validation_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/cross_validation_class1.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.0/data_iedb.tar.bz2\n        default: false\n\n      - name: data_published\n        url: 
http://github.com/openvax/mhcflurry/releases/download/pre-1.1/data_published.tar.bz2\n        default: false\n\n      - name: data_systemhcatlas\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/data_systemhcatlas.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.1/data_curated.tar.bz2\n        default: true\n\n  1.0.0:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.0/models_class1.tar.bz2\n        default: true\n\n      - name: models_class1_experiments1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.0/models_class1_experiments1.tar.bz2\n        default: false\n\n      - name: cross_validation_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.0/cross_validation_class1.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.0/data_iedb.tar.bz2\n        default: false\n\n      - name: data_kim2014\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/data_kim2014.tar.bz2\n        default: false\n\n      - name: data_curated\n        url: http://github.com/openvax/mhcflurry/releases/download/pre-1.0/data_curated.tar.bz2\n        default: true\n\n  0.9.2:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.2/models_class1.tar.bz2\n        default: true\n\n      - name: data_curated\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/data_curated.tar.bz2\n        default: true\n\n      - name: data_kim2014\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/data_kim2014.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/data_iedb.tar.bz2\n        default: false\n\n      - name: models_class1_experiments1\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.2/models_class1_experiments1.tar.bz2\n        default: false\n\n\n  0.9.1:\n    compatibility-version: 2\n    downloads:\n      - name: models_class1\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/models_class1.tar.bz2\n        default: true\n\n      - name: data_curated\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/data_curated.tar.bz2\n        default: true\n\n      - name: data_kim2014\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/data_kim2014.tar.bz2\n        default: false\n\n      - name: data_iedb\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/data_iedb.tar.bz2\n        default: false\n\n      - name: models_class1_experiments1\n        url: http://github.com/openvax/mhcflurry/releases/download/0.9.1/models_class1_experiments1.tar.bz2\n        default: false\n\n  0.2.0:\n    compatibility-version: 1\n    downloads:\n      - name: models_class1_allele_specific_ensemble\n        url: http://github.com/openvax/mhcflurry/releases/download/0.2.0/models_class1_allele_specific_ensemble.tar.bz2\n        default: true\n\n      - name: models_class1_allele_specific_single\n        url: http://github.com/openvax/mhcflurry/releases/download/0.2.0/models_class1_allele_specific_single.tar.bz2\n        default: false\n\n      - name: 
data_kim2014\n        url: http://github.com/openvax/mhcflurry/releases/download/0.0.8/data_kim2014.tar.bz2\n        default: true\n\n      - name: data_combined_iedb_kim2014\n        url: http://github.com/openvax/mhcflurry/releases/download/0.0.8/data_combined_iedb_kim2014.tar.bz2\n        default: true\n\n  0.0.8:\n    compatibility-version: 1\n    downloads:\n      - name: models_class1_allele_specific_single\n        url: http://github.com/openvax/mhcflurry/releases/download/0.0.8/models_class1_allele_specific_single.no_impute.tar.bz2\n        default: true\n\n      - name: data_kim2014\n        url: http://github.com/openvax/mhcflurry/releases/download/0.0.8/data_kim2014.tar.bz2\n        default: true\n\n      - name: data_combined_iedb_kim2014\n        url: http://github.com/openvax/mhcflurry/releases/download/0.0.8/data_combined_iedb_kim2014.tar.bz2\n        default: true\n"
  },
  {
    "path": "mhcflurry/downloads_command.py",
    "content": "'''\nDownload MHCflurry released datasets and trained models.\n\nExamples\n\nFetch the default downloads:\n    $ mhcflurry-downloads fetch\n\nFetch a specific download:\n    $ mhcflurry-downloads fetch models_class1_pan\n\nGet the path to a download:\n    $ mhcflurry-downloads path models_class1_pan\n\nGet the URL of a download:\n    $ mhcflurry-downloads url models_class1_pan\n\nSummarize available and fetched downloads:\n    $ mhcflurry-downloads info\n'''\nimport sys\nimport argparse\nimport logging\nimport os\nfrom pipes import quote\nimport errno\nimport tarfile\nfrom shutil import copyfileobj\nfrom tempfile import NamedTemporaryFile\nfrom tqdm import tqdm\n\nimport posixpath\nimport pandas\n\ntry:\n    from urllib.request import urlretrieve\n    from urllib.parse import urlsplit\nexcept ImportError:\n    from urllib import urlretrieve\n    from urlparse import urlsplit\n\nfrom .downloads import (\n    get_current_release,\n    get_current_release_downloads,\n    get_downloads_dir,\n    get_path,\n    ENVIRONMENT_VARIABLES)\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\nparser = argparse.ArgumentParser(\n    description=__doc__,\n    formatter_class=argparse.RawDescriptionHelpFormatter)\n\nparser.add_argument(\n    \"--quiet\",\n    action=\"store_true\",\n    default=False,\n    help=\"Output less\")\n\nparser.add_argument(\n    \"--verbose\",\n    \"-v\",\n    action=\"store_true\",\n    default=False,\n    help=\"Output more\")\n\nsubparsers = parser.add_subparsers(dest=\"subparser_name\")\n\nparser_fetch = subparsers.add_parser('fetch')\nparser_fetch.add_argument(\n    'download_name',\n    metavar=\"DOWNLOAD\",\n    nargs=\"*\",\n    help=\"Items to download\")\nparser_fetch.add_argument(\n    \"--keep\",\n    action=\"store_true\",\n    default=False,\n    help=\"Don't delete archives after they are extracted\")\nparser_fetch.add_argument(\n    \"--release\",\n    default=get_current_release(),\n    help=\"Release to download. Default: %(default)s\")\nparser_fetch.add_argument(\n    \"--already-downloaded-dir\",\n    metavar=\"DIR\",\n    help=\"Don't download files, get them from DIR\")\n\nparser_info = subparsers.add_parser('info')\n\nparser_path = subparsers.add_parser('path')\nparser_path.add_argument(\n    \"download_name\",\n    nargs=\"?\",\n    default='')\n\nparser_url = subparsers.add_parser('url')\nparser_url.add_argument(\n    \"download_name\",\n    nargs=\"?\",\n    default='')\n\n\ndef run(argv=sys.argv[1:]):\n    args = parser.parse_args(argv)\n    if not args.quiet:\n        logging.basicConfig(level=\"INFO\")\n    if args.verbose:\n        logging.basicConfig(level=\"DEBUG\")\n\n    command_functions = {\n        \"fetch\": fetch_subcommand,\n        \"info\": info_subcommand,\n        \"path\": path_subcommand,\n        \"url\": url_subcommand,\n        None: lambda args: parser.print_help(),\n    }\n    command_functions[args.subparser_name](args)\n\n\ndef mkdir_p(path):\n    \"\"\"\n    Make directories as needed, similar to mkdir -p in a shell.\n\n    From:\n    http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python\n    \"\"\"\n    try:\n        os.makedirs(path)\n    except OSError as exc:  # Python >2.5\n        if exc.errno == errno.EEXIST and os.path.isdir(path):\n            pass\n        else:\n            raise\n\n\ndef yes_no(boolean):\n    return \"YES\" if boolean else \"NO\"\n\n\n# For progress bar on download. 
See https://pypi.python.org/pypi/tqdm\nclass TqdmUpTo(tqdm):\n    \"\"\"Provides `update_to(n)` which uses `tqdm.update(delta_n)`.\"\"\"\n    def update_to(self, b=1, bsize=1, tsize=None):\n        \"\"\"\n        b  : int, optional\n            Number of blocks transferred so far [default: 1].\n        bsize  : int, optional\n            Size of each block (in tqdm units) [default: 1].\n        tsize  : int, optional\n            Total size (in tqdm units). If None [default], the total is left\n            unchanged.\n        \"\"\"\n        if tsize is not None:\n            self.total = tsize\n        self.update(b * bsize - self.n)  # will also set self.n = b * bsize\n\n\ndef fetch_subcommand(args):\n    def qprint(msg):\n        if not args.quiet:\n            print(msg)\n\n    if not args.release:\n        raise RuntimeError(\n            \"No release defined. This can happen when you are specifying \"\n            \"a custom models directory. Specify --release to indicate \"\n            \"the release to download.\")\n\n    downloads = get_current_release_downloads()\n    invalid_download_names = set(\n        item for item in args.download_name if item not in downloads)\n    if invalid_download_names:\n        raise ValueError(\"Unknown download(s): %s. Valid downloads are: %s\" % (\n            ', '.join(invalid_download_names), ', '.join(downloads)))\n\n    items_to_fetch = set()\n    for (name, info) in downloads.items():\n        default = not args.download_name and info['metadata']['default']\n        if name in args.download_name and info['downloaded']:\n            print((\n                \"*\" * 40 +\n                \"\\nThe requested download '%s' has already been downloaded. \"\n                \"To re-download this data, first run: \\n\\t%s\\nin a shell \"\n                \"and then re-run this command.\\n\" +\n                \"*\" * 40) % (name, 'rm -rf ' + quote(get_path(name))))\n        if not info['downloaded'] and (name in args.download_name or default):\n            items_to_fetch.add(name)\n\n    mkdir_p(get_downloads_dir())\n\n    qprint(\"Fetching %d/%d downloads from release %s\" % (\n        len(items_to_fetch), len(downloads), args.release))\n    format_string = \"%-40s  %-20s   %-20s  %-20s \"\n    qprint(format_string % (\n        \"DOWNLOAD NAME\", \"ALREADY DOWNLOADED?\", \"WILL DOWNLOAD NOW?\", \"URL\"))\n\n    for (item, info) in downloads.items():\n        urls = (\n            [info['metadata'][\"url\"]]\n            if \"url\" in info['metadata']\n            else info['metadata'][\"part_urls\"])\n        url_description = urls[0]\n        if len(urls) > 1:\n            url_description += \" + %d more parts\" % (len(urls) - 1)\n\n        qprint(format_string % (\n            item,\n            yes_no(info['downloaded']),\n            yes_no(item in items_to_fetch),\n            url_description))\n\n    # TODO: may want to extract into somewhere temporary and then rename to\n    # avoid making an incomplete extract if the process is killed.\n    for item in items_to_fetch:\n        metadata = downloads[item]['metadata']\n        urls = (\n            [metadata[\"url\"]] if \"url\" in metadata else metadata[\"part_urls\"])\n        temp = NamedTemporaryFile(delete=False, suffix=\".tar.bz2\")\n        try:\n            for (url_num, url) in enumerate(urls):\n                delete_downloaded = True\n                if args.already_downloaded_dir:\n                    filename = posixpath.basename(urlsplit(url).path)\n                    downloaded_path = os.path.join(\n  
                      args.already_downloaded_dir, filename)\n                    delete_downloaded = False\n                else:\n                    qprint(\"Downloading [part %d/%d]: %s\" % (\n                        url_num + 1, len(urls), url))\n                    (downloaded_path, _) = urlretrieve(\n                        url,\n                        temp.name if len(urls) == 1 else None,\n                        reporthook=TqdmUpTo(\n                            unit='B', unit_scale=True, miniters=1).update_to)\n                    qprint(\"Downloaded to: %s\" % quote(downloaded_path))\n\n                if downloaded_path != temp.name:\n                    qprint(\"Copying to: %s\" % temp.name)\n                    with open(downloaded_path, \"rb\") as fd:\n                        copyfileobj(fd, temp, length=64*1024*1024)\n                    if delete_downloaded:\n                        os.remove(downloaded_path)\n\n            temp.close()\n            tar = tarfile.open(temp.name, 'r:bz2')\n            names = tar.getnames()\n            logging.debug(\"Extracting: %s\" % names)\n            bad_names = [\n                n for n in names\n                if n.strip().startswith(\"/\") or n.strip().startswith(\"..\")\n            ]\n            if bad_names:\n                raise RuntimeError(\n                    \"Archive has suspicious names: %s\" % bad_names)\n            result_dir = get_path(item, test_exists=False)\n            os.mkdir(result_dir)\n\n            for member in tqdm(tar.getmembers(), desc='Extracting'):\n                tar.extractall(path=result_dir, members=[member])\n            tar.close()\n\n            # Save URLs that were used for this download.\n            pandas.DataFrame({\"url\": urls}).to_csv(\n                os.path.join(result_dir, \"DOWNLOAD_INFO.csv\"), index=False)\n            qprint(\"Extracted %d files to: %s\" % (\n                len(names), quote(result_dir)))\n        finally:\n            if not args.keep:\n                os.remove(temp.name)\n\n\ndef info_subcommand(args):\n    print(\"Environment variables\")\n    for variable in ENVIRONMENT_VARIABLES:\n        value = os.environ.get(variable)\n        if value:\n            print('  %-35s = %s' % (variable, quote(value)))\n        else:\n            print(\"  %-35s [unset or empty]\" % variable)\n\n    print(\"\")\n    print(\"Configuration\")\n\n    def exists_string(path):\n        return (\n            \"exists\" if os.path.exists(path) else \"does not exist\")\n\n    items = [\n        (\"current release\", get_current_release(), \"\"),\n        (\"downloads dir\",\n            get_downloads_dir(),\n            \"[%s]\" % exists_string(get_downloads_dir())),\n    ]\n    for (key, value, extra) in items:\n        print(\"  %-35s = %-20s %s\" % (key, quote(value), extra))\n\n    print(\"\")\n\n    downloads = get_current_release_downloads()\n\n    format_string = \"%-40s  %-12s  %-12s  %-20s \"\n    print(format_string % (\"DOWNLOAD NAME\", \"DOWNLOADED?\", \"UP TO DATE?\", \"URL\"))\n\n    for (item, info) in downloads.items():\n        urls = (\n            [info['metadata'][\"url\"]]\n            if \"url\" in info['metadata']\n            else info['metadata'][\"part_urls\"])\n        url_description = urls[0]\n        if len(urls) > 1:\n            url_description += \" + %d more parts\" % (len(urls) - 1)\n\n        print(format_string % (\n            item,\n            yes_no(info['downloaded']),\n            \"\" if not info['downloaded'] else (\n             
   \"UNKNOWN\" if info['up_to_date'] is None\n                else yes_no(info['up_to_date'])\n            ),\n            url_description))\n\n\ndef path_subcommand(args):\n    \"\"\"\n    Print the local path to a download\n    \"\"\"\n    print(get_path(args.download_name))\n\n\ndef url_subcommand(args):\n    \"\"\"\n    Print the URL(s) for a download\n    \"\"\"\n    downloads = get_current_release_downloads()\n    download = downloads[args.download_name][\"metadata\"]\n    urls = []\n    if download.get(\"url\"):\n        urls.append(download[\"url\"])\n    if download.get(\"part_urls\"):\n        urls.extend(download[\"part_urls\"])\n    print(\"\\n\".join(urls))\n"
  },
  {
    "path": "mhcflurry/encodable_sequences.py",
    "content": "\"\"\"\nClass for encoding variable-length peptides to fixed-size numerical matrices\n\"\"\"\nimport math\n\nimport numpy\nimport pandas\n\nfrom . import amino_acid\n\n\nclass EncodingError(ValueError):\n    \"\"\"\n    Exception raised when peptides cannot be encoded\n    \"\"\"\n    def __init__(self, message, supported_peptide_lengths):\n        self.supported_peptide_lengths = supported_peptide_lengths\n        ValueError.__init__(\n            self,\n            message + \" Supported lengths: %s - %s.\" % supported_peptide_lengths)\n\n\nclass EncodableSequences(object):\n    \"\"\"\n    Class for encoding variable-length peptides to fixed-size numerical matrices\n\n    This class caches various encodings of a list of sequences.\n\n    In practice this is used only for peptides. To encode MHC allele sequences,\n    see AlleleEncoding.\n    \"\"\"\n    unknown_character = \"X\"\n\n    @classmethod\n    def create(klass, sequences):\n        \"\"\"\n        Factory that returns an EncodableSequences given a list of\n        strings. As a convenience, you can also pass it an EncodableSequences\n        instance, in which case the object is returned unchanged.\n        \"\"\"\n        if isinstance(sequences, klass):\n            return sequences\n        return klass(sequences)\n\n    def __init__(self, sequences):\n        if not all(isinstance(obj, str) for obj in sequences):\n            raise ValueError(\"Sequence of strings is required\")\n        self.sequences = numpy.array(sequences)\n        lengths = pandas.Series(self.sequences, dtype=numpy.object_).str.len()\n\n        self.min_length = lengths.min()\n        self.max_length = lengths.max()\n\n        self.encoding_cache = {}\n        self.fixed_sequence_length = None\n        if len(self.sequences) > 0 and all(\n                len(s) == len(self.sequences[0]) for s in self.sequences):\n            self.fixed_sequence_length = len(self.sequences[0])\n\n    def __len__(self):\n        return len(self.sequences)\n\n    def variable_length_to_fixed_length_categorical(\n            self,\n            alignment_method=\"pad_middle\",\n            left_edge=4,\n            right_edge=4,\n            max_length=15):\n        \"\"\"\n        Encode variable-length sequences to a fixed-size index-encoded (integer)\n        matrix.\n\n        See `sequences_to_fixed_length_index_encoded_array` for details.\n\n        Parameters\n        ----------\n        alignment_method : string\n            One of \"pad_middle\" or \"left_pad_right_pad\"\n        left_edge : int, size of fixed-position left side\n            Only relevant for pad_middle alignment method\n        right_edge : int, size of the fixed-position right side\n            Only relevant for pad_middle alignment method\n        max_length : maximum supported peptide length\n\n        Returns\n        -------\n        numpy.array of integers with shape (num sequences, encoded length)\n\n        For pad_middle, the encoded length is max_length. 
For left_pad_right_pad,\n        it's 2 * max_length.\n        \"\"\"\n\n        cache_key = (\n            \"fixed_length_categorical\",\n            alignment_method,\n            left_edge,\n            right_edge,\n            max_length)\n\n        if cache_key not in self.encoding_cache:\n            fixed_length_sequences = (\n                self.sequences_to_fixed_length_index_encoded_array(\n                    self.sequences,\n                    alignment_method=alignment_method,\n                    left_edge=left_edge,\n                    right_edge=right_edge,\n                    max_length=max_length))\n            self.encoding_cache[cache_key] = fixed_length_sequences\n        return self.encoding_cache[cache_key]\n\n    def variable_length_to_fixed_length_vector_encoding(\n            self,\n            vector_encoding_name,\n            alignment_method=\"pad_middle\",\n            left_edge=4,\n            right_edge=4,\n            max_length=15,\n            trim=False,\n            allow_unsupported_amino_acids=False):\n        \"\"\"\n        Encode variable-length sequences to a fixed-size matrix. Amino acids\n        are encoded as specified by the vector_encoding_name argument.\n\n        See `sequences_to_fixed_length_index_encoded_array` for details.\n\n        See also: variable_length_to_fixed_length_categorical.\n\n        Parameters\n        ----------\n        vector_encoding_name : string\n            How to represent amino acids.\n            One of \"BLOSUM62\", \"one-hot\", etc. Full list of supported vector\n            encodings is given by available_vector_encodings().\n        alignment_method : string\n            One of \"pad_middle\" or \"left_pad_right_pad\"\n        left_edge : int\n            Size of fixed-position left side.\n            Only relevant for pad_middle alignment method\n        right_edge : int\n            Size of the fixed-position right side.\n            Only relevant for pad_middle alignment method\n        max_length : int\n            Maximum supported peptide length\n        trim : bool\n            If True, longer sequences will be trimmed to fit the maximum\n            supported length. 
Not supported for all alignment methods.\n        allow_unsupported_amino_acids : bool\n            If True, non-canonical amino acids will be replaced with the X\n            character before encoding.\n\n        Returns\n        -------\n        numpy.array with shape (num sequences, encoded length, m)\n\n        where\n            - m is the vector encoding length (usually 21).\n            - encoded length is max_length if alignment_method is pad_middle;\n              2 * max_length if it's left_pad_right_pad.\n        \"\"\"\n        cache_key = (\n            \"fixed_length_vector_encoding\",\n            vector_encoding_name,\n            alignment_method,\n            left_edge,\n            right_edge,\n            max_length,\n            trim,\n            allow_unsupported_amino_acids)\n        if cache_key not in self.encoding_cache:\n            fixed_length_sequences = (\n                self.sequences_to_fixed_length_index_encoded_array(\n                    self.sequences,\n                    alignment_method=alignment_method,\n                    left_edge=left_edge,\n                    right_edge=right_edge,\n                    max_length=max_length,\n                    trim=trim,\n                    allow_unsupported_amino_acids=allow_unsupported_amino_acids))\n            result = amino_acid.fixed_vectors_encoding(\n                fixed_length_sequences,\n                amino_acid.ENCODING_DATA_FRAMES[vector_encoding_name])\n            assert result.shape[0] == len(self.sequences)\n            self.encoding_cache[cache_key] = result\n        return self.encoding_cache[cache_key]\n\n    @classmethod\n    def sequences_to_fixed_length_index_encoded_array(\n            klass,\n            sequences,\n            alignment_method=\"pad_middle\",\n            left_edge=4,\n            right_edge=4,\n            max_length=15,\n            trim=False,\n            allow_unsupported_amino_acids=False):\n        \"\"\"\n        Encode variable-length sequences to a fixed-size index-encoded (integer)\n        matrix.\n\n        How variable length sequences get mapped to fixed length is set by the\n        \"alignment_method\" argument. Supported alignment methods are:\n\n            pad_middle\n                Encoding designed for preserving the anchor positions of class\n                I peptides. This is what is used in allele-specific models.\n\n                Each string must be of length at least left_edge + right_edge\n                and at most max_length. The first left_edge characters in the\n                input always map to the first left_edge characters in the\n                output. Similarly for the last right_edge characters. The\n                middle characters are filled in based on the length, with the\n                X character filling in the blanks.\n\n                Example:\n\n                AAAACDDDD -> AAAAXXXCXXXDDDD\n\n            left_pad_centered_right_pad\n                Encoding that makes no assumptions on anchor positions but is\n                3x larger than pad_middle, since it duplicates the peptide\n                (left aligned + centered + right aligned). 
This is what is used\n                for the pan-allele models.\n\n                Example:\n\n                AAAACDDDD -> AAAACDDDDXXXXXXXXXAAAACDDDDXXXXXXXXXAAAACDDDD\n\n            left_pad_right_pad\n                Same as left_pad_centered_right_pad but only includes left-\n                and right-padded peptide.\n\n                Example:\n\n                AAAACDDDD -> AAAACDDDDXXXXXXXXXXXXAAAACDDDD\n\n        Parameters\n        ----------\n        sequences : list of string\n        alignment_method : string\n            One of \"pad_middle\" or \"left_pad_right_pad\"\n        left_edge : int\n            Size of fixed-position left side.\n            Only relevant for pad_middle alignment method\n        right_edge : int\n            Size of the fixed-position right side.\n            Only relevant for pad_middle alignment method\n        max_length : int\n            maximum supported peptide length\n        trim : bool\n            If True, longer sequences will be trimmed to fit the maximum\n            supported length. Not supported for all alignment methods.\n        allow_unsupported_amino_acids : bool\n            If True, non-canonical amino acids will be replaced with the X\n            character before encoding.\n\n        Returns\n        -------\n        numpy.array of integers with shape (num sequences, encoded length)\n\n        For pad_middle, the encoded length is max_length. For left_pad_right_pad,\n        it's 2 * max_length. For left_pad_centered_right_pad, it's\n        3 * max_length.\n        \"\"\"\n        if allow_unsupported_amino_acids:\n            fill_value = amino_acid.AMINO_ACID_INDEX['X']\n\n            def get_amino_acid_index(a):\n                return amino_acid.AMINO_ACID_INDEX.get(a, fill_value)\n        else:\n            get_amino_acid_index = amino_acid.AMINO_ACID_INDEX.__getitem__\n\n        result = None\n        if alignment_method == 'pad_middle':\n            if trim:\n                raise NotImplementedError(\"trim not supported\")\n\n            # Result array is int32, filled with X (null amino acid) value.\n            result = numpy.full(\n                fill_value=amino_acid.AMINO_ACID_INDEX['X'],\n                shape=(len(sequences), max_length),\n                dtype=\"int32\")\n\n            df = pandas.DataFrame({\"peptide\": sequences}, dtype=numpy.object_)\n            df[\"length\"] = df.peptide.str.len()\n\n            middle_length = max_length - left_edge - right_edge\n            min_length = left_edge + right_edge\n\n            # For efficiency we handle each supported peptide length using bulk\n            # array operations.\n            for (length, sub_df) in df.groupby(\"length\"):\n                if length < min_length or length > max_length:\n                    raise EncodingError(\n                        \"Sequence '%s' (length %d) unsupported. 
There are %d \"\n                        \"total peptides with this length.\" % (\n                            sub_df.iloc[0].peptide,\n                            length,\n                            len(sub_df)), supported_peptide_lengths=(\n                                min_length, max_length))\n\n                # Array of shape (num peptides, length) giving fixed-length\n                # amino acid encoding each peptide of the current length.\n                fixed_length_sequences = numpy.stack(\n                    sub_df.peptide.map(\n                        lambda s: numpy.array([\n                            get_amino_acid_index(char) for char in s\n                        ])).values)\n\n                num_null = max_length - length\n                num_null_left = int(math.ceil(num_null / 2))\n                num_middle_filled = middle_length - num_null\n                middle_start = left_edge + num_null_left\n\n                # Set left edge\n                result[sub_df.index, :left_edge] = fixed_length_sequences[\n                    :, :left_edge\n                ]\n\n                # Set middle.\n                result[\n                    sub_df.index,\n                    middle_start : middle_start + num_middle_filled\n                ] = fixed_length_sequences[\n                    :, left_edge : left_edge + num_middle_filled\n                ]\n\n                # Set right edge.\n                result[\n                    sub_df.index,\n                    -right_edge:\n                ] = fixed_length_sequences[:, -right_edge:]\n        elif alignment_method == \"left_pad_right_pad\":\n            if trim:\n                raise NotImplementedError(\"trim not supported\")\n\n            # We arbitrarily set a minimum length of 5, although this encoding\n            # could handle smaller peptides.\n            min_length = 5\n\n            # Result array is int32, filled with X (null amino acid) value.\n            result = numpy.full(\n                fill_value=amino_acid.AMINO_ACID_INDEX['X'],\n                shape=(len(sequences), max_length * 2),\n                dtype=\"int32\")\n\n            df = pandas.DataFrame({\"peptide\": sequences}, dtype=numpy.object_)\n\n            # For efficiency we handle each supported peptide length using bulk\n            # array operations.\n            for (length, sub_df) in df.groupby(df.peptide.str.len()):\n                if length < min_length or length > max_length:\n                    raise EncodingError(\n                        \"Sequence '%s' (length %d) unsupported. 
There are %d \"\n                        \"total peptides with this length.\" % (\n                            sub_df.iloc[0].peptide,\n                            length,\n                            len(sub_df)), supported_peptide_lengths=(\n                                min_length, max_length))\n\n                # Array of shape (num peptides, length) giving fixed-length\n                # amino acid encoding each peptide of the current length.\n                fixed_length_sequences = numpy.stack(sub_df.peptide.map(\n                    lambda s: numpy.array([\n                        get_amino_acid_index(char) for char in s\n                    ])).values)\n\n                # Set left edge\n                result[sub_df.index, :length] = fixed_length_sequences\n\n                # Set right edge.\n                result[sub_df.index, -length:] = fixed_length_sequences\n        elif alignment_method == \"left_pad_centered_right_pad\":\n            if trim:\n                raise NotImplementedError(\"trim not supported\")\n\n            # We arbitrarily set a minimum length of 5, although this encoding\n            # could handle smaller peptides.\n            min_length = 5\n\n            # Result array is int32, filled with X (null amino acid) value.\n            result = numpy.full(\n                fill_value=amino_acid.AMINO_ACID_INDEX['X'],\n                shape=(len(sequences), max_length * 3),\n                dtype=\"int32\")\n\n            df = pandas.DataFrame({\"peptide\": sequences}, dtype=numpy.object_)\n\n            # For efficiency we handle each supported peptide length using bulk\n            # array operations.\n            for (length, sub_df) in df.groupby(df.peptide.str.len()):\n                if length < min_length or length > max_length:\n                    raise EncodingError(\n                        \"Sequence '%s' (length %d) unsupported. 
There are %d \"\n                        \"total peptides with this length.\" % (\n                            sub_df.iloc[0].peptide,\n                            length,\n                            len(sub_df)), supported_peptide_lengths=(\n                                min_length, max_length))\n\n                # Array of shape (num peptides, length) giving fixed-length\n                # amino acid encoding each peptide of the current length.\n                fixed_length_sequences = numpy.stack(sub_df.peptide.map(\n                    lambda s: numpy.array([\n                        get_amino_acid_index(char) for char in s\n                    ])).values)\n\n                # Set left edge\n                result[sub_df.index, :length] = fixed_length_sequences\n\n                # Set right edge.\n                result[sub_df.index, -length:] = fixed_length_sequences\n\n                # Set center.\n                center_left_padding = int(\n                    math.floor((max_length - length) / 2))\n                center_left_offset = max_length + center_left_padding\n                result[\n                    sub_df.index,\n                    center_left_offset : center_left_offset + length\n                ] = fixed_length_sequences\n        elif alignment_method in (\"right_pad\", \"left_pad\"):\n            min_length = 1\n\n            # Result array is int32, filled with X (null amino acid) value.\n            result = numpy.full(\n                fill_value=amino_acid.AMINO_ACID_INDEX['X'],\n                shape=(len(sequences), max_length),\n                dtype=\"int32\")\n\n            df = pandas.DataFrame({\"peptide\": sequences}, dtype=numpy.object_)\n\n            # For efficiency we handle each supported peptide length using bulk\n            # array operations.\n            for (length, sub_df) in df.groupby(df.peptide.str.len()):\n                if length < min_length or (not trim and length > max_length):\n                    raise EncodingError(\n                        \"Sequence '%s' (length %d) unsupported. There are %d \"\n                        \"total peptides with this length.\" % (\n                            sub_df.iloc[0].peptide,\n                            length,\n                            len(sub_df)), supported_peptide_lengths=(\n                                min_length, max_length))\n\n                peptides = sub_df.peptide\n                if length > max_length:\n                    # Trim.\n                    if alignment_method == \"right_pad\":\n                        peptides = peptides.str.slice(0, max_length)\n                    else:\n                        peptides = peptides.str.slice(length - max_length)\n\n                # Array of shape (num peptides, length) giving fixed-length\n                # amino acid encoding each peptide of the current length.\n                fixed_length_sequences = numpy.stack(peptides.map(\n                    lambda s: numpy.array([\n                        get_amino_acid_index(char) for char in s\n                    ])).values)\n\n                if alignment_method == \"right_pad\":\n                    # Left align (i.e. 
pad right): set left edge\n                    result[sub_df.index, :length] = fixed_length_sequences\n                else:\n                    # Right align: set right edge.\n                    result[sub_df.index, -length:] = fixed_length_sequences\n\n        else:\n            raise NotImplementedError(\n                \"Unsupported alignment method: %s\" % alignment_method)\n\n\n        return result\n"
  },
  {
    "path": "mhcflurry/ensemble_centrality.py",
    "content": "\"\"\"\nMeasures of centrality (e.g. mean) used to combine predictions across an\nensemble. The input to these functions are log affinities, and they are expected\nto return a centrality measure also in log-space.\n\"\"\"\n\nimport numpy\n\n\ndef _nanmean_no_warnings(log_values):\n    \"\"\"\n    Row-wise nanmean that returns nan for all-nan rows without warnings.\n    \"\"\"\n    valid = ~numpy.isnan(log_values)\n    counts = valid.sum(axis=1).astype(\"float64\")\n    sums = numpy.where(valid, log_values, 0.0).sum(axis=1)\n    result = numpy.full(log_values.shape[0], numpy.nan, dtype=\"float64\")\n    numpy.divide(sums, counts, out=result, where=counts > 0)\n    return result\n\n\ndef _nanmedian_no_warnings(log_values):\n    \"\"\"\n    Row-wise nanmedian that returns nan for all-nan rows without warnings.\n    \"\"\"\n    result = numpy.full(log_values.shape[0], numpy.nan, dtype=\"float64\")\n    row_has_values = (~numpy.isnan(log_values)).any(axis=1)\n    if row_has_values.any():\n        result[row_has_values] = numpy.nanmedian(log_values[row_has_values], axis=1)\n    return result\n\n\ndef robust_mean(log_values):\n    \"\"\"\n    Mean of values falling within the 25-75 percentiles.\n\n    Parameters\n    ----------\n    log_values : 2-d numpy.array\n        Center is computed along the second axis (i.e. per row).\n\n    Returns\n    -------\n    center : numpy.array of length log_values.shape[1]\n\n    \"\"\"\n    if log_values.shape[1] <= 3:\n        # Too few values to use robust mean.\n        return _nanmean_no_warnings(log_values)\n\n    result = numpy.full(log_values.shape[0], numpy.nan, dtype=\"float64\")\n    row_has_values = (~numpy.isnan(log_values)).any(axis=1)\n    if not row_has_values.any():\n        return result\n\n    valid_rows = log_values[row_has_values]\n    without_nans = numpy.nan_to_num(valid_rows)  # replace nan with 0\n    p75 = numpy.nanpercentile(valid_rows, 75, axis=1).reshape((-1, 1))\n    p25 = numpy.nanpercentile(valid_rows, 25, axis=1).reshape((-1, 1))\n    mask = (\n        (~numpy.isnan(valid_rows)) &\n        (without_nans <= p75) &\n        (without_nans >= p25))\n    mask_f = mask.astype(\"float64\")\n    numerator = (without_nans * mask_f).sum(axis=1)\n    denominator = mask_f.sum(axis=1)\n    robust = numpy.full(valid_rows.shape[0], numpy.nan, dtype=\"float64\")\n    numpy.divide(numerator, denominator, out=robust, where=denominator > 0)\n    result[row_has_values] = robust\n    return result\n\n\nCENTRALITY_MEASURES = {\n    \"mean\": _nanmean_no_warnings,\n    \"median\": _nanmedian_no_warnings,\n    \"robust_mean\": robust_mean,\n}\n"
  },
  {
    "path": "mhcflurry/fasta.py",
    "content": "\"\"\"\nAdapted from pyensembl, github.com/openvax/pyensembl\nOriginal implementation by Alex Rubinsteyn.\n\nThe worse sin in bioinformatics is to write your own FASTA parser.\nWe're doing this to avoid adding another dependency to MHCflurry, however.\n\"\"\"\n\nfrom gzip import GzipFile\nimport logging\n\nimport pandas\n\n\ndef read_fasta_to_dataframe(filename, full_descriptions=False):\n    \"\"\"\n    Parse a fasta file to a pandas DataFrame.\n\n    Parameters\n    ----------\n    filename : string\n    full_descriptions : bool\n        If true, instead of returning sequence IDs (the first space-separated\n        token), return the full description associated with each record.\n    Returns\n    -------\n    pandas.DataFrame with columns \"sequence_id\" and \"sequence\".\n    \"\"\"\n    reader = FastaParser()\n    rows = reader.iterate_over_file(\n        filename, full_descriptions=full_descriptions)\n    return pandas.DataFrame(\n        rows,\n        columns=[\"sequence_id\", \"sequence\"])\n\n\nclass FastaParser(object):\n    \"\"\"\n    FastaParser object consumes lines of a FASTA file incrementally.\n    \"\"\"\n    def __init__(self):\n        self.current_id = None\n        self.current_lines = []\n\n    def iterate_over_file(self, fasta_path, full_descriptions=False):\n        \"\"\"\n        Generator that yields identifiers paired with sequences.\n        \"\"\"\n        with self.open_file(fasta_path) as f:\n            for line in f:\n                line = line.rstrip()\n\n                if len(line) == 0:\n                    continue\n\n                # have to slice into a bytes object or else get a single integer\n                first_char = line[0:1]\n\n                if first_char == b\">\":\n                    previous_entry = self._current_entry()\n                    self.current_id = self._parse_header_id(\n                        line, full_description=full_descriptions)\n\n                    if len(self.current_id) == 0:\n                        logging.warning(\n                            \"Unable to parse ID from header line: %s\", line)\n\n                    self.current_lines = []\n\n                    if previous_entry is not None:\n                        yield previous_entry\n\n                elif first_char == b\";\":\n                    # semicolon are comment characters\n                    continue\n                else:\n                    self.current_lines.append(line)\n\n        # the last sequence is still in the lines buffer after we're done with\n        # the file so make sure to yield it\n        id_and_seq = self._current_entry()\n        if id_and_seq is not None:\n            yield id_and_seq\n\n    def _current_entry(self):\n        # when we hit a new entry, if this isn't the first\n        # entry of the file then put the last one in the dictionary\n        if self.current_id:\n            if len(self.current_lines) == 0:\n                logging.warning(\"No sequence data for '%s'\", self.current_id)\n            else:\n                sequence = b\"\".join(self.current_lines).decode(\"ascii\")\n                return self.current_id, sequence\n\n    @staticmethod\n    def open_file(fasta_path):\n        \"\"\"\n        Open either a text file or compressed gzip file as a stream of bytes.\n        \"\"\"\n        if fasta_path.endswith(\"gz\") or fasta_path.endswith(\"gzip\"):\n            return GzipFile(fasta_path, 'rb')\n        else:\n            return open(fasta_path, 'rb')\n\n    @staticmethod\n    
def _parse_header_id(line, full_description=False):\n        \"\"\"\n        Pull the transcript or protein identifier from the header line\n        which starts with '>'\n        \"\"\"\n        if type(line) is not bytes:\n            raise TypeError(\"Expected header line to be of type %s but got %s\" % (\n                bytes, type(line)))\n\n        if len(line) <= 1:\n            raise ValueError(\"No identifier on FASTA line\")\n\n        # split line at first space to get the unique identifier for\n        # this sequence\n        space_index = line.find(b\" \")\n        if space_index >= 0 and not full_description:\n            identifier = line[1:space_index]\n        else:\n            identifier = line[1:]\n\n        return identifier.decode(\"ascii\")\n"
  },
  {
    "path": "mhcflurry/flanking_encoding.py",
    "content": "\"\"\"\nClass for encoding variable-length flanking and peptides to\nfixed-size numerical matrices\n\"\"\"\nfrom collections import namedtuple\nimport logging\n\nfrom .encodable_sequences import EncodingError, EncodableSequences\n\nimport numpy\nimport pandas\n\n\nEncodingResult =  namedtuple(\n    \"EncodingResult\", [\"array\", \"peptide_lengths\"])\n\n\nclass FlankingEncoding(object):\n    \"\"\"\n    Encode peptides and optionally their N- and C-flanking sequences into fixed\n    size numerical matrices. Similar to EncodableSequences but with support\n    for flanking sequences and the encoding scheme used by the processing\n    predictor.\n\n    Instances of this class have an immutable list of peptides with\n    flanking sequences. Encodings are cached in the instances for faster\n    performance when the same set of peptides needs to encoded more than once.\n    \"\"\"\n    unknown_character = \"X\"\n\n    def __init__(self, peptides, n_flanks, c_flanks):\n        \"\"\"\n        Constructor. Sequences of any lengths can be passed.\n\n        Parameters\n        ----------\n        peptides : list of string\n            Peptide sequences\n        n_flanks : list of string [same length as peptides]\n            Upstream sequences\n        c_flanks : list of string [same length as peptides]\n            Downstream sequences\n        \"\"\"\n        self.dataframe = pandas.DataFrame({\n            \"peptide\": peptides,\n            \"n_flank\": n_flanks,\n            \"c_flank\": c_flanks,\n        }, dtype=str)\n        self.encoding_cache = {}\n\n    def __len__(self):\n        \"\"\"\n        Number of peptides.\n        \"\"\"\n        return len(self.dataframe)\n\n    def vector_encode(\n            self,\n            vector_encoding_name,\n            peptide_max_length,\n            n_flank_length,\n            c_flank_length,\n            allow_unsupported_amino_acids=True,\n            throw=True):\n        \"\"\"\n        Encode variable-length sequences to a fixed-size matrix.\n\n        Parameters\n        ----------\n        vector_encoding_name : string\n            How to represent amino acids. One of \"BLOSUM62\", \"one-hot\", etc.\n            See `amino_acid.available_vector_encodings()`.\n        peptide_max_length : int\n            Maximum supported peptide length.\n        n_flank_length : int\n            Maximum supported N-flank length\n        c_flank_length : int\n            Maximum supported C-flank length\n        allow_unsupported_amino_acids : bool\n            If True, non-canonical amino acids will be replaced with the X\n            character before encoding.\n        throw : bool\n            Whether to raise exception on unsupported peptides\n\n        Returns\n        -------\n        numpy.array with shape (num sequences, length, m)\n\n        where\n            - num sequences is number of peptides, i.e. 
len(self)\n            - length is peptide_max_length + n_flank_length + c_flank_length\n            - m is the vector encoding length (usually 21).\n        \"\"\"\n        cache_key = (\n            \"vector_encode\",\n            vector_encoding_name,\n            peptide_max_length,\n            n_flank_length,\n            c_flank_length,\n            allow_unsupported_amino_acids,\n            throw)\n        if cache_key not in self.encoding_cache:\n            result = self.encode(\n                vector_encoding_name=vector_encoding_name,\n                df=self.dataframe,\n                peptide_max_length=peptide_max_length,\n                n_flank_length=n_flank_length,\n                c_flank_length=c_flank_length,\n                allow_unsupported_amino_acids=allow_unsupported_amino_acids,\n                throw=throw)\n            self.encoding_cache[cache_key] = result\n        return self.encoding_cache[cache_key]\n\n    @staticmethod\n    def encode(\n            vector_encoding_name,\n            df,\n            peptide_max_length,\n            n_flank_length,\n            c_flank_length,\n            allow_unsupported_amino_acids=False,\n            throw=True):\n        \"\"\"\n        Encode variable-length sequences to a fixed-size matrix.\n\n        Helper function. Users should use `vector_encode`.\n\n        Parameters\n        ----------\n        vector_encoding_name : string\n        df : pandas.DataFrame\n        peptide_max_length : int\n        n_flank_length : int\n        c_flank_length : int\n        allow_unsupported_amino_acids : bool\n        throw : bool\n\n        Returns\n        -------\n        numpy.array\n        \"\"\"\n        error_df = df.loc[\n            (df.peptide.str.len() > peptide_max_length) |\n            (df.peptide.str.len() < 1)\n        ]\n        if len(error_df) > 0:\n            message = (\n                \"Sequence '%s' (length %d) unsupported. There are %d \"\n                \"total peptides with this length.\" % (\n                    error_df.iloc[0].peptide,\n                    len(error_df.iloc[0].peptide),\n                    len(error_df)))\n            if throw:\n                raise EncodingError(\n                    message,\n                    supported_peptide_lengths=(1, peptide_max_length + 1))\n            logging.warning(message)\n\n            # Replace invalid peptides with X's. 
The encoding will be set to\n            # NaNs for these peptides farther below.\n            df.loc[error_df.index, \"peptide\"] = \"X\" * peptide_max_length\n\n        if n_flank_length > 0:\n            n_flanks = df.n_flank.str.pad(\n                n_flank_length,\n                side=\"left\",\n                fillchar=\"X\").str.slice(-n_flank_length).str.upper()\n        else:\n            n_flanks = pandas.Series([\"\"] * len(df), dtype=str)\n\n        c_flanks = df.c_flank.str.pad(\n            c_flank_length,\n            side=\"right\",\n            fillchar=\"X\").str.slice(0, c_flank_length).str.upper()\n        peptides = df.peptide.str.upper()\n\n        concatenated = n_flanks + peptides + c_flanks\n\n        encoder = EncodableSequences.create(concatenated.values)\n        array = encoder.variable_length_to_fixed_length_vector_encoding(\n            vector_encoding_name=vector_encoding_name,\n            alignment_method=\"right_pad\",\n            max_length=n_flank_length + peptide_max_length + c_flank_length,\n            allow_unsupported_amino_acids=allow_unsupported_amino_acids)\n\n        array = array.astype(\"float32\")  # So NaNs can be used.\n\n        if len(error_df) > 0:\n            array[error_df.index] = numpy.nan\n\n        result = EncodingResult(\n            array, peptide_lengths=peptides.str.len().values)\n\n        return result\n"
  },
  {
    "path": "mhcflurry/hyperparameters.py",
    "content": "\"\"\"\nHyperparameter (neural network options) management\n\"\"\"\nimport itertools\n\n\nclass HyperparameterDefaults(object):\n    \"\"\"\n    Class for managing hyperparameters. Thin wrapper around a dict.\n\n    Instances of this class are a specification of the hyperparameters\n    *supported* by a model and their defaults. The particular\n    hyperparameter settings to be used, for example, to train a model\n    are kept in plain dicts.\n    \"\"\"\n    def __init__(self, **defaults):\n        self.defaults = dict(defaults)\n\n    def extend(self, other):\n        \"\"\"\n        Return a new HyperparameterDefaults instance containing the\n        hyperparameters from the current instance combined with\n        those from other.\n\n        It is an error if self and other have any hyperparameters in\n        common.\n        \"\"\"\n        overlap = [key for key in other.defaults if key in self.defaults]\n        if overlap:\n            raise ValueError(\n                \"Duplicate hyperparameter(s): %s\" % \" \".join(overlap))\n        new = dict(self.defaults)\n        new.update(other.defaults)\n        return HyperparameterDefaults(**new)\n\n    def with_defaults(self, obj):\n        \"\"\"\n        Given a dict of hyperparameter settings, return a dict containing\n        those settings augmented by the defaults for any keys missing from\n        the dict.\n        \"\"\"\n        self.check_valid_keys(obj)\n        obj = dict(obj)\n        for (key, value) in self.defaults.items():\n            if key not in obj:\n                obj[key] = value\n        return obj\n\n    def subselect(self, obj):\n        \"\"\"\n        Filter a dict of hyperparameter settings to only those keys defined\n        in this HyperparameterDefaults  .\n        \"\"\"\n        return dict(\n            (key, value) for (key, value)\n            in obj.items()\n            if key in self.defaults)\n\n    def check_valid_keys(self, obj):\n        \"\"\"\n        Given a dict of hyperparameter settings, throw an exception if any\n        keys are not defined in this HyperparameterDefaults instance.\n        \"\"\"\n        invalid_keys = [\n            x for x in obj if x not in self.defaults\n        ]\n        if invalid_keys:\n            raise ValueError(\n                \"No such model parameters: %s. Valid parameters are: %s\"\n                % (\" \".join(invalid_keys), \" \".join(self.defaults)))\n\n    def models_grid(self, **kwargs):\n        '''\n        Make a grid of models by taking the cartesian product of all specified\n        model parameter lists.\n\n        Parameters\n        -----------\n        The valid kwarg parameters are the entries of this\n        HyperparameterDefaults instance. Each parameter must be a list\n        giving the values to search across.\n\n        Returns\n        -----------\n        list of dict giving the parameters for each model. 
The length of the\n        list is the product of the lengths of the input lists.\n        '''\n\n        # Check parameters\n        self.check_valid_keys(kwargs)\n        for (key, value) in kwargs.items():\n            if not isinstance(value, list):\n                raise ValueError(\n                    \"All parameters must be lists, but %s is %s\"\n                    % (key, str(type(value))))\n\n        # Make models, using defaults.\n        parameters = dict(\n            (key, [value]) for (key, value) in self.defaults.items())\n        parameters.update(kwargs)\n        parameter_names = list(parameters)\n        parameter_values = [parameters[name] for name in parameter_names]\n\n        models = [\n            dict(zip(parameter_names, model_values))\n            for model_values in itertools.product(*parameter_values)\n        ]\n        return models\n"
  },
  {
    "path": "mhcflurry/local_parallelism.py",
    "content": "\"\"\"\nInfrastructure for \"local\" parallelism, i.e. multiprocess parallelism on one\ncompute node.\n\"\"\"\n\nimport itertools\nimport traceback\nimport sys\nimport os\nimport time\nimport queue\nfrom multiprocessing import Pool, Queue, cpu_count\nfrom multiprocessing.util import Finalize\nfrom pprint import pprint\nimport random\n\nimport numpy\n\nfrom .common import configure_pytorch, normalize_pytorch_backend\n\n\ndef add_local_parallelism_args(parser):\n    \"\"\"\n    Add local parallelism arguments to the given argparse.ArgumentParser.\n\n    Parameters\n    ----------\n    parser : argparse.ArgumentParser\n    \"\"\"\n    group = parser.add_argument_group(\"Local parallelism\")\n\n    group.add_argument(\n        \"--num-jobs\",\n        default=0,\n        type=int,\n        metavar=\"N\",\n        help=\"Number of local processes to parallelize training over. \"\n             \"Set to 0 for serial run. Default: %(default)s.\")\n    group.add_argument(\n        \"--backend\",\n        choices=(\"auto\", \"default\", \"gpu\", \"mps\", \"cpu\"),\n        default=\"auto\",\n        help=\"Device backend. 'default' is a legacy alias for 'auto'. 'gpu' \"\n             \"means CUDA. 'auto' (default) selects the \"\n             \"best available device: GPU > MPS > CPU. When --gpus is set, \"\n             \"GPU-assigned workers use CUDA and overflow workers are forced \"\n             \"to CPU.\")\n    group.add_argument(\n        \"--gpus\",\n        type=int,\n        metavar=\"N\",\n        help=\"Number of CUDA GPUs, starting at index 0, to assign across \"\n             \"parallel workers. Requires --num-jobs > 0. Each assigned worker \"\n             \"gets one GPU; workers beyond --gpus * --max-workers-per-gpu run \"\n             \"on CPU.\")\n    group.add_argument(\n        \"--max-workers-per-gpu\",\n        type=int,\n        metavar=\"N\",\n        default=1000,\n        help=\"Maximum number of workers to assign to a GPU. Additional tasks will \"\n             \"run on CPU.\")\n    group.add_argument(\n        \"--max-tasks-per-worker\",\n        type=int,\n        metavar=\"N\",\n        default=None,\n        help=\"Restart workers after N tasks. Workaround for memory \"\n             \"leaks. Requires Python >=3.2.\")\n    group.add_argument(\n        \"--worker-log-dir\",\n        default=None,\n        help=\"Write worker stdout and stderr logs to given directory.\")\n\n\ndef worker_pool_with_gpu_assignments_from_args(args):\n    \"\"\"\n    Create a multiprocessing.Pool where each worker uses its own GPU.\n\n    Uses commandline arguments. 
See `worker_pool_with_gpu_assignments`.\n\n    Parameters\n    ----------\n    args : argparse.Namespace\n\n    Returns\n    -------\n    multiprocessing.Pool, or None if num_jobs is 0\n    \"\"\"\n\n    return worker_pool_with_gpu_assignments(\n        num_jobs=args.num_jobs,\n        num_gpus=args.gpus,\n        backend=args.backend,\n        max_workers_per_gpu=args.max_workers_per_gpu,\n        max_tasks_per_worker=args.max_tasks_per_worker,\n        worker_log_dir=args.worker_log_dir,\n    )\n\n\ndef worker_pool_with_gpu_assignments(\n        num_jobs,\n        num_gpus=0,\n        backend=None,\n        max_workers_per_gpu=1,\n        max_tasks_per_worker=None,\n        worker_log_dir=None):\n    \"\"\"\n    Create a multiprocessing.Pool where each worker uses its own GPU.\n\n    Parameters\n    ----------\n    num_jobs : int\n        Number of worker processes.\n    num_gpus : int\n    backend : string\n    max_workers_per_gpu : int\n    max_tasks_per_worker : int\n    worker_log_dir : string\n\n    Returns\n    -------\n    multiprocessing.Pool, or None if num_jobs is 0\n    \"\"\"\n    backend = normalize_pytorch_backend(backend or \"auto\")\n    validate_worker_pool_args(\n        num_jobs=num_jobs,\n        num_gpus=num_gpus,\n        backend=backend,\n        max_workers_per_gpu=max_workers_per_gpu)\n\n    if num_jobs == 0:\n        configure_pytorch(backend=backend)\n        return None\n\n    worker_init_kwargs = worker_init_kwargs_for_scheduler(\n        num_jobs=num_jobs,\n        num_gpus=num_gpus,\n        backend=backend,\n        max_workers_per_gpu=max_workers_per_gpu)\n    if num_gpus:\n        print(\n            \"Assigning %d workers across %d CUDA GPUs (%d workers max per GPU). \"\n            \"Overflow workers will run on CPU.\" % (\n                num_jobs, num_gpus, max_workers_per_gpu))\n        for (worker_num, kwargs) in enumerate(worker_init_kwargs):\n            print(\n                \"Worker %d assigned backend=%s GPUs=%s\" % (\n                    worker_num,\n                    kwargs[\"backend\"],\n                    kwargs.get(\"gpu_device_nums\")))\n\n    if worker_log_dir:\n        for kwargs in worker_init_kwargs:\n            kwargs[\"worker_log_dir\"] = worker_log_dir\n\n    worker_pool = make_worker_pool(\n        processes=num_jobs,\n        initializer=worker_init,\n        initializer_kwargs_per_process=worker_init_kwargs,\n        max_tasks_per_worker=max_tasks_per_worker)\n    return worker_pool\n\n\ndef validate_worker_pool_args(\n        num_jobs,\n        num_gpus=0,\n        backend=\"auto\",\n        max_workers_per_gpu=1):\n    \"\"\"\n    Validate local worker scheduling arguments.\n\n    ``--gpus`` controls CUDA worker assignment only. 
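For example, with\n    num_jobs=3, num_gpus=2, and max_workers_per_gpu=1, workers 0 and 1 are\n    pinned to CUDA devices 0 and 1, and worker 2 falls back to CPU. 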
It does not select MPS\n    devices and it does not distribute a single model across multiple GPUs.\n    \"\"\"\n    backend = normalize_pytorch_backend(backend or \"auto\")\n    if num_jobs < 0:\n        raise ValueError(\"num_jobs must be >= 0\")\n    if num_gpus is None:\n        num_gpus = 0\n    if num_gpus < 0:\n        raise ValueError(\"num_gpus must be >= 0\")\n    if max_workers_per_gpu < 1:\n        raise ValueError(\"max_workers_per_gpu must be >= 1\")\n    if num_gpus:\n        if num_jobs == 0:\n            raise ValueError(\"num_gpus requires num_jobs > 0\")\n        if backend not in (\"auto\", \"gpu\"):\n            raise ValueError(\n                \"num_gpus is only supported with backend 'auto' or 'gpu'\")\n\n\ndef worker_init_kwargs_for_scheduler(\n        num_jobs,\n        num_gpus=0,\n        backend=\"auto\",\n        max_workers_per_gpu=1):\n    \"\"\"\n    Build per-worker init kwargs from the local scheduling configuration.\n\n    When ``num_gpus`` is set, workers are assigned one CUDA GPU each in round\n    robin order. Any additional workers are forced onto CPU by hiding CUDA and\n    setting their backend to ``cpu``.\n    \"\"\"\n    backend = normalize_pytorch_backend(backend or \"auto\")\n    validate_worker_pool_args(\n        num_jobs=num_jobs,\n        num_gpus=num_gpus,\n        backend=backend,\n        max_workers_per_gpu=max_workers_per_gpu)\n\n    if not num_gpus:\n        return [{\"backend\": backend} for _ in range(num_jobs)]\n\n    gpu_assignments = list(itertools.chain.from_iterable(\n        range(num_gpus) for _ in range(max_workers_per_gpu)))\n\n    worker_kwargs = []\n    for worker_num in range(num_jobs):\n        if worker_num < len(gpu_assignments):\n            worker_kwargs.append({\n                \"backend\": \"gpu\",\n                \"gpu_device_nums\": [gpu_assignments[worker_num]],\n            })\n        else:\n            worker_kwargs.append({\n                \"backend\": \"cpu\",\n                \"gpu_device_nums\": [],\n            })\n    return worker_kwargs\n\n\ndef make_worker_pool(\n        processes=None,\n        initializer=None,\n        initializer_kwargs_per_process=None,\n        max_tasks_per_worker=None):\n    \"\"\"\n    Convenience wrapper to create a multiprocessing.Pool.\n\n    This function adds support for per-worker initializer arguments, which are\n    not natively supported by the multiprocessing module. The motivation for\n    this feature is to support allocating each worker to a (different) GPU.\n\n    IMPLEMENTATION NOTE:\n        The per-worker initializer arguments are implemented using a Queue. Each\n        worker reads its arguments from this queue when it starts. When it\n        terminates, it adds its initializer arguments back to the queue, so a\n        future process can initialize itself using these arguments.\n\n        There is one issue with this approach, however. If a worker crashes, it\n        never repopulates the queue of initializer arguments. This will prevent\n        any future worker from re-using those arguments. To deal with this\n        issue we add a second 'backup queue'. This queue always contains the\n        full set of initializer arguments: whenever a worker reads from it, it\n        always pushes the pop'd args back to the end of the queue immediately.\n        If the primary arg queue is ever empty, then workers will read\n        from this backup queue.\n\n    Parameters\n    ----------\n    processes : int\n        Number of workers. 
Default: num CPUs.\n\n    initializer : function, optional\n        Init function to call in each worker\n\n    initializer_kwargs_per_process : list of dict, optional\n        Arguments to pass to initializer function for each worker. Length of\n        list must equal the number of workers.\n\n    max_tasks_per_worker : int, optional\n        Restart workers after this many tasks. Requires Python >=3.2.\n\n    Returns\n    -------\n    multiprocessing.Pool\n    \"\"\"\n\n    if not processes:\n        processes = cpu_count()\n\n    pool_kwargs = {\n        'processes': processes,\n    }\n    if max_tasks_per_worker:\n        pool_kwargs[\"maxtasksperchild\"] = max_tasks_per_worker\n\n    if initializer:\n        if initializer_kwargs_per_process:\n            assert len(initializer_kwargs_per_process) == processes\n            kwargs_queue = Queue()\n            kwargs_queue_backup = Queue()\n            for kwargs in initializer_kwargs_per_process:\n                kwargs_queue.put(kwargs)\n                kwargs_queue_backup.put(kwargs)\n            pool_kwargs[\"initializer\"] = worker_init_entry_point\n            pool_kwargs[\"initargs\"] = (\n                initializer, kwargs_queue, kwargs_queue_backup)\n        else:\n            pool_kwargs[\"initializer\"] = initializer\n\n    worker_pool = Pool(**pool_kwargs)\n    print(\"Started pool: %s\" % str(worker_pool))\n    pprint(pool_kwargs)\n    return worker_pool\n\n\ndef worker_init_entry_point(\n        init_function, arg_queue=None, backup_arg_queue=None):\n    kwargs = {}\n    if arg_queue:\n        try:\n            kwargs = arg_queue.get(block=False)\n        except queue.Empty:\n            print(\"Argument queue empty. Using round robin arg queue.\")\n            kwargs = backup_arg_queue.get(block=True)\n            backup_arg_queue.put(kwargs)\n\n        # On exit we add the init args back to the queue so restarted workers\n        # (e.g. 
when running with maxtasksperchild) will pick up init\n        # arguments from a previously exited worker.\n        Finalize(None, arg_queue.put, (kwargs,), exitpriority=1)\n\n    print(\"Initializing worker: %s\" % str(kwargs))\n    init_function(**kwargs)\n\n\ndef worker_init(\n        keras_backend=None, backend=None, gpu_device_nums=None,\n        worker_log_dir=None):\n    del keras_backend  # legacy argument retained for API compatibility\n    if worker_log_dir:\n        sys.stderr = sys.stdout = open(os.path.join(\n            worker_log_dir,\n            \"LOG-worker.%d.%d.txt\" % (os.getpid(), int(time.time()))), \"w\")\n\n    # Each worker needs distinct random numbers\n    numpy.random.seed()\n    random.seed()\n    if gpu_device_nums is not None:\n        print(\"WORKER pid=%d assigned GPU devices: %s\" % (\n            os.getpid(), gpu_device_nums))\n        configure_pytorch(backend=backend, gpu_device_nums=gpu_device_nums)\n    else:\n        configure_pytorch(backend=backend)\n\n\n# Solution suggested in https://bugs.python.org/issue13831\nclass WrapException(Exception):\n    \"\"\"\n    Add traceback info to exception so exceptions raised in worker processes\n    can still show traceback info when re-raised in the parent.\n    \"\"\"\n    def __init__(self):\n        exc_type, exc_value, exc_tb = sys.exc_info()\n        self.exception = exc_value\n        self.formatted = ''.join(\n            traceback.format_exception(exc_type, exc_value, exc_tb))\n\n    def __str__(self):\n        return '%s\\nOriginal traceback:\\n%s' % (\n            Exception.__str__(self), self.formatted)\n\n\ndef call_wrapped(function, *args, **kwargs):\n    \"\"\"\n    Run function on args and kwargs and return result, wrapping any exception\n    raised in a WrapException.\n\n    Parameters\n    ----------\n    function : arbitrary function\n\n    Any other arguments provided are passed to the function.\n\n    Returns\n    -------\n    object\n    \"\"\"\n    try:\n        return function(*args, **kwargs)\n    except Exception:\n        raise WrapException()\n\n\ndef call_wrapped_kwargs(function, kwargs):\n    \"\"\"\n    Invoke function on given kwargs and return result, wrapping any exception\n    raised in a WrapException.\n\n    Parameters\n    ----------\n    function : arbitrary function\n    kwargs : dict\n\n    Returns\n    -------\n    object\n        Result of calling ``function(**kwargs)``.\n    \"\"\"\n    return call_wrapped(function, **kwargs)\n"
  },
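The round-robin assignment performed by `worker_init_kwargs_for_scheduler` is easiest to see on a small example. A minimal sketch, assuming the file above is importable as `mhcflurry.local_parallelism`; the worker/GPU counts are illustrative only:

```python
# Sketch: per-worker init kwargs for 5 workers, 2 CUDA GPUs, and at most
# 2 workers per GPU.
from mhcflurry.local_parallelism import worker_init_kwargs_for_scheduler

kwargs_per_worker = worker_init_kwargs_for_scheduler(
    num_jobs=5, num_gpus=2, backend="auto", max_workers_per_gpu=2)
for (worker_num, kwargs) in enumerate(kwargs_per_worker):
    print(worker_num, kwargs)

# Workers 0-3 get backend="gpu" with gpu_device_nums [0], [1], [0], [1];
# worker 4 overflows to backend="cpu" with gpu_device_nums=[].
```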
  {
    "path": "mhcflurry/percent_rank_transform.py",
    "content": "\"\"\"\nClass for transforming arbitrary values into percent ranks given a distribution.\n\"\"\"\nimport numpy\nimport pandas\n\n\nclass PercentRankTransform(object):\n    \"\"\"\n    Transform arbitrary values into percent ranks.\n    \"\"\"\n\n    def __init__(self):\n        self.cdf = None\n        self.bin_edges = None\n\n    def fit(self, values, bins):\n        \"\"\"\n        Fit the transform using the given values (e.g. ic50s).\n\n        Parameters\n        ----------\n        values : predictions (e.g. ic50 values)\n        bins : bins for the cumulative distribution function\n            Anything that can be passed to numpy.histogram's \"bins\" argument\n            can be used here.\n        \"\"\"\n        assert self.cdf is None\n        assert self.bin_edges is None\n        assert len(values) > 0\n        (hist, self.bin_edges) = numpy.histogram(values, bins=bins)\n        self.cdf = numpy.ones(len(hist) + 3) * numpy.nan\n        self.cdf[0] = 0.0\n        self.cdf[1] = 0.0\n        self.cdf[-1] = 100.0\n        numpy.cumsum(hist * 100.0 / numpy.sum(hist), out=self.cdf[2:-1])\n        assert not numpy.isnan(self.cdf).any()\n\n    def transform(self, values):\n        \"\"\"\n        Return percent ranks (range [0, 100]) for the given values.\n        \"\"\"\n        assert self.cdf is not None\n        assert self.bin_edges is not None\n        indices = numpy.searchsorted(self.bin_edges, values)\n        result = self.cdf[indices]\n        assert len(result) == len(values)\n\n        # NaNs in input become NaNs in output\n        result[numpy.isnan(values)] = numpy.nan\n\n        return numpy.minimum(result, 100.0)\n\n    def to_series(self):\n        \"\"\"\n        Serialize the fit to a pandas.Series.\n\n        The index on the series gives the bin edges and the values give the CDF.\n\n        Returns\n        -------\n        pandas.Series\n\n        \"\"\"\n        return pandas.Series(\n            self.cdf, index=[numpy.nan] + list(self.bin_edges) + [numpy.nan])\n\n    @staticmethod\n    def from_series(series):\n        \"\"\"\n        Deseralize a PercentRankTransform the given pandas.Series, as returned\n        by `to_series()`.\n\n        Parameters\n        ----------\n        series : pandas.Series\n\n        Returns\n        -------\n        PercentRankTransform\n\n        \"\"\"\n        result = PercentRankTransform()\n        result.cdf = series.values\n        result.bin_edges = series.index.values[1:-1]\n        return result\n"
  },
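A short usage sketch for `PercentRankTransform`, on synthetic values and assuming the package above is importable: fit on a sample of predictions, transform new values, and round-trip through the `pandas.Series` form used for serialization.

```python
import numpy
from mhcflurry.percent_rank_transform import PercentRankTransform

# Fit on 10,000 synthetic predictions using 100 equal-width histogram bins.
transform = PercentRankTransform()
transform.fit(numpy.random.uniform(0, 50000, size=10000), bins=100)

# Percent ranks are in [0, 100]; NaN inputs yield NaN outputs.
print(transform.transform(numpy.array([100.0, 5000.0, 49000.0])))

# Round-trip through the Series form used when persisting to disk.
restored = PercentRankTransform.from_series(transform.to_series())
```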
  {
    "path": "mhcflurry/predict_command.py",
    "content": "'''\nRun MHCflurry predictor on specified peptides.\n\nBy default, the presentation predictor is used, and predictions for\nMHC I binding affinity, antigen processing, and the composite presentation score\nare returned. If you just want binding affinity predictions, pass\n--affinity-only.\n\nExamples:\n\nWrite a CSV file containing the contents of INPUT.csv plus additional columns\ngiving MHCflurry predictions:\n\n$ mhcflurry-predict INPUT.csv --out RESULT.csv\n\nThe input CSV file is expected to contain columns \"allele\", \"peptide\", and,\noptionally, \"n_flank\", and \"c_flank\".\n\nIf `--out` is not specified, results are written to stdout.\n\nYou can also run on alleles and peptides specified on the commandline, in\nwhich case predictions are written for *all combinations* of alleles and\npeptides:\n\n$ mhcflurry-predict --alleles HLA-A0201 H-2Kb --peptides SIINFEKL DENDREKLLL\n\nInstead of individual alleles (in a CSV or on the command line), you can also\ngive a comma separated list of alleles giving a sample genotype. In this case,\nthe tightest binding affinity across the alleles for the sample will be\nreturned. For example:\n\n$ mhcflurry-predict --peptides SIINFEKL DENDREKLLL \\\n    --alleles \\\n        HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:01,HLA-C*07:02 \\\n        HLA-A*01:01,HLA-A*02:06,HLA-B*44:02,HLA-B*07:02,HLA-C*01:01,HLA-C*03:01\n\nwill give the tightest predicted affinities across alleles for each of the two\ngenotypes specified for each peptide.\n'''\nimport sys\nimport argparse\nimport itertools\nimport logging\nimport os\n\nimport pandas\n\nfrom .downloads import get_default_class1_presentation_models_dir\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .class1_presentation_predictor import Class1PresentationPredictor\nfrom .version import __version__\n\n\nparser = argparse.ArgumentParser(\n    description=__doc__,\n    formatter_class=argparse.RawDescriptionHelpFormatter,\n    add_help=False)\n\n\nhelper_args = parser.add_argument_group(title=\"Help\")\nhelper_args.add_argument(\n    \"-h\", \"--help\",\n    action=\"help\",\n    help=\"Show this help message and exit\"\n)\nhelper_args.add_argument(\n    \"--list-supported-alleles\",\n    action=\"store_true\",\n    default=False,\n    help=\"Prints the list of supported alleles and exits\"\n)\nhelper_args.add_argument(\n    \"--list-supported-peptide-lengths\",\n    action=\"store_true\",\n    default=False,\n    help=\"Prints the list of supported peptide lengths and exits\"\n)\nhelper_args.add_argument(\n    \"--version\",\n    action=\"version\",\n    version=\"mhcflurry %s\" % __version__,\n)\n\ninput_args = parser.add_argument_group(title=\"Input (required)\")\ninput_args.add_argument(\n    \"input\",\n    metavar=\"INPUT.csv\",\n    nargs=\"?\",\n    help=\"Input CSV\")\ninput_args.add_argument(\n    \"--alleles\",\n    metavar=\"ALLELE\",\n    nargs=\"+\",\n    help=\"Alleles to predict (exclusive with passing an input CSV)\")\ninput_args.add_argument(\n    \"--peptides\",\n    metavar=\"PEPTIDE\",\n    nargs=\"+\",\n    help=\"Peptides to predict (exclusive with passing an input CSV)\")\n\ninput_mod_args = parser.add_argument_group(title=\"Input options\")\ninput_mod_args.add_argument(\n    \"--allele-column\",\n    metavar=\"NAME\",\n    default=\"allele\",\n    help=\"Input column name for alleles. 
Default: '%(default)s'\")\ninput_mod_args.add_argument(\n    \"--peptide-column\",\n    metavar=\"NAME\",\n    default=\"peptide\",\n    help=\"Input column name for peptides. Default: '%(default)s'\")\ninput_mod_args.add_argument(\n    \"--n-flank-column\",\n    metavar=\"NAME\",\n    default=\"n_flank\",\n    help=\"Column giving N-terminal flanking sequence. Default: '%(default)s'\")\ninput_mod_args.add_argument(\n    \"--c-flank-column\",\n    metavar=\"NAME\",\n    default=\"c_flank\",\n    help=\"Column giving C-terminal flanking sequence. Default: '%(default)s'\")\ninput_mod_args.add_argument(\n    \"--no-throw\",\n    action=\"store_true\",\n    default=False,\n    help=\"Return NaNs for unsupported alleles or peptides instead of raising\")\n\noutput_args = parser.add_argument_group(title=\"Output options\")\noutput_args.add_argument(\n    \"--out\",\n    metavar=\"OUTPUT.csv\",\n    help=\"Output CSV\")\noutput_args.add_argument(\n    \"--prediction-column-prefix\",\n    metavar=\"NAME\",\n    default=\"mhcflurry_\",\n    help=\"Prefix for output column names. Default: '%(default)s'\")\noutput_args.add_argument(\n    \"--output-delimiter\",\n    metavar=\"CHAR\",\n    default=\",\",\n    help=\"Delimiter character for results. Default: '%(default)s'\")\noutput_args.add_argument(\n    \"--no-affinity-percentile\",\n    default=False,\n    action=\"store_true\",\n    help=\"Do not include affinity percentile rank\")\noutput_args.add_argument(\n    \"--always-include-best-allele\",\n    default=False,\n    action=\"store_true\",\n    help=\"Always include the best_allele column even when it is identical \"\n    \"to the allele column (i.e. all queries are monoallelic).\")\n\nmodel_args = parser.add_argument_group(title=\"Model options\")\nmodel_args.add_argument(\n    \"--models\",\n    metavar=\"DIR\",\n    default=None,\n    help=\"Directory containing models. Either a binding affinity predictor or \"\n    \"a presentation predictor can be used. 
\"\n    \"Default: %s\" % get_default_class1_presentation_models_dir(\n        test_exists=False))\nmodel_args.add_argument(\n    \"--affinity-only\",\n    action=\"store_true\",\n    default=False,\n    help=\"Affinity prediction only (no antigen processing or presentation)\")\nmodel_args.add_argument(\n    \"--no-flanking\",\n    action=\"store_true\",\n    default=False,\n    help=\"Do not use flanking sequence information even when available\")\n\n\ndef run(argv=sys.argv[1:]):\n    if not argv:\n        parser.print_help()\n        parser.exit(1)\n\n    args = parser.parse_args(argv)\n\n    # It's hard to pass a tab in a shell, so we correct a common error:\n    if args.output_delimiter == \"\\\\t\":\n        args.output_delimiter = \"\\t\"\n\n    models_dir = args.models\n    if models_dir is None:\n        # The reason we set the default here instead of in the argument parser\n        # is that we want to test_exists at this point, so the user gets a\n        # message instructing them to download the models if needed.\n        models_dir = get_default_class1_presentation_models_dir(test_exists=True)\n\n    if os.path.exists(os.path.join(models_dir, \"weights.csv\")):\n        # Using a presentation predictor.\n        predictor = Class1PresentationPredictor.load(models_dir)\n    else:\n        # Using just an affinity predictor.\n        affinity_predictor = Class1AffinityPredictor.load(models_dir)\n        predictor = Class1PresentationPredictor(\n            affinity_predictor=affinity_predictor)\n        if not args.affinity_only:\n            logging.warning(\n                \"Specified models are an affinity predictor, which implies \"\n                \"--affinity-only. Specify this argument to silence this warning.\")\n            args.affinity_only = True\n\n    if args.list_supported_alleles:\n        print(\"\\n\".join(predictor.supported_alleles))\n        return\n\n    if args.list_supported_peptide_lengths:\n        min_len, max_len = predictor.supported_peptide_lengths\n        print(\"\\n\".join([str(length) for length in range(min_len, max_len + 1)]))\n        return\n\n    if args.input:\n        if args.alleles or args.peptides:\n            parser.error(\n                \"If an input file is specified, do not specify --alleles \"\n                \"or --peptides\")\n        df = pandas.read_csv(args.input)\n        print(\"Read input CSV with %d rows, columns are: %s\" % (\n            len(df), \", \".join(df.columns)))\n        for col in [args.allele_column, args.peptide_column]:\n            if col not in df.columns:\n                raise ValueError(\n                    \"No such column '%s' in CSV. 
Columns are: %s\" % (\n                        col, \", \".join([\"'%s'\" % c for c in df.columns])))\n    else:\n        if not args.alleles or not args.peptides:\n            parser.error(\n                \"Specify either an input CSV file or both the \"\n                \"--alleles and --peptides arguments\")\n\n        pairs = list(itertools.product(args.alleles, args.peptides))\n        df = pandas.DataFrame({\n            \"allele\": [p[0] for p in pairs],\n            \"peptide\": [p[1] for p in pairs],\n        })\n        logging.info(\n            \"Predicting for %d alleles and %d peptides = %d predictions\" % (\n                len(args.alleles), len(args.peptides), len(df)))\n\n    allele_string_to_alleles = (\n        df.drop_duplicates(args.allele_column).set_index(\n            args.allele_column, drop=False)[\n                args.allele_column\n        ].str.split(r\"[,\\s]+\")).to_dict()\n\n    if args.affinity_only:\n        predictions = predictor.predict_affinity(\n            peptides=df[args.peptide_column].values,\n            alleles=allele_string_to_alleles,\n            sample_names=df[args.allele_column],\n            throw=not args.no_throw,\n            include_affinity_percentile=not args.no_affinity_percentile)\n    else:\n        n_flanks = None\n        c_flanks = None\n        if not args.no_flanking:\n            if args.n_flank_column in df.columns and args.c_flank_column in df.columns:\n                n_flanks = df[args.n_flank_column]\n                c_flanks = df[args.c_flank_column]\n            else:\n                logging.warning(\n                    \"No flanking information provided. Specify --no-flanking \"\n                    \"to silence this warning\")\n\n        predictions = predictor.predict(\n            peptides=df[args.peptide_column].values,\n            n_flanks=n_flanks,\n            c_flanks=c_flanks,\n            alleles=allele_string_to_alleles,\n            sample_names=df[args.allele_column],\n            throw=not args.no_throw,\n            include_affinity_percentile=not args.no_affinity_percentile)\n\n    # If each query is just for a single allele, the \"best_allele\" column\n    # is redundant so we remove it.\n    if not args.always_include_best_allele:\n        if all(len(a) == 1 for a in allele_string_to_alleles.values()):\n            del predictions[\"best_allele\"]\n\n    for col in predictions.columns:\n        if col not in (\"allele\", \"peptide\", \"sample_name\", \"peptide_num\"):\n            df[args.prediction_column_prefix + col] = predictions[col]\n\n    if args.out:\n        df.to_csv(args.out, index=False, sep=args.output_delimiter)\n        print(\"Wrote: %s\" % args.out)\n    else:\n        df.to_csv(sys.stdout, index=False, sep=args.output_delimiter)\n"
  },
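One subtlety in `run` above is the genotype handling: each unique allele string is split on commas or whitespace, so a plain allele becomes a one-element genotype while a comma-separated genotype becomes a list. A standalone sketch of that transformation on toy data (the alleles here are illustrative):

```python
import pandas

# Mirror of the allele_string_to_alleles logic in run() above.
df = pandas.DataFrame({"allele": [
    "HLA-A0201",
    "HLA-A*02:01,HLA-B*57:01,HLA-C*07:02",
]})
allele_string_to_alleles = (
    df.drop_duplicates("allele").set_index("allele", drop=False)["allele"]
    .str.split(r"[,\s]+")).to_dict()
print(allele_string_to_alleles)
# {'HLA-A0201': ['HLA-A0201'],
#  'HLA-A*02:01,HLA-B*57:01,HLA-C*07:02':
#      ['HLA-A*02:01', 'HLA-B*57:01', 'HLA-C*07:02']}
```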
  {
    "path": "mhcflurry/predict_scan_command.py",
    "content": "'''\nScan protein sequences using the MHCflurry presentation predictor.\n\nBy default, sub-sequences (peptides) with affinity percentile ranks less than\n2.0 are returned. You can also specify --results-all to return predictions for\nall peptides, or adjust the filter threshold(s) using the --threshold-* options.\n\nExamples:\n\nScan a set of sequences in a FASTA file for binders to any alleles in a MHC I\ngenotype:\n\n$ mhcflurry-predict-scan \\\n    test/data/example.fasta \\\n    --alleles HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:01,HLA-C*07:02\n\nInstead of a FASTA, you can also pass a CSV that has \"sequence_id\" and \"sequence\"\ncolumns.\n\nYou can also specify multiple MHC I genotypes to scan as space-separated\narguments to the --alleles option:\n\n$ mhcflurry-predict-scan \\\n    test/data/example.fasta \\\n    --alleles \\\n        HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:02,HLA-C*07:02 \\\n        HLA-A*01:01,HLA-A*02:06,HLA-B*44:02,HLA-B*07:02,HLA-C*01:02,HLA-C*03:01\n\nIf `--out` is not specified, results are written to standard out.\n\nYou can also specify sequences on the commandline:\n\nmhcflurry-predict-scan \\\n    --sequences MGYINVFAFPFTIYSLLLCRMNSRNYIAQVDVVNFNLT \\\n    --alleles HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:02,HLA-C*07:02\n\n'''\nimport sys\nimport argparse\n\nimport pandas\n\nfrom .downloads import get_default_class1_presentation_models_dir\nfrom .class1_presentation_predictor import Class1PresentationPredictor\nfrom .fasta import read_fasta_to_dataframe\nfrom .version import __version__\n\n\nparser = argparse.ArgumentParser(\n    description=__doc__,\n    formatter_class=argparse.RawDescriptionHelpFormatter,\n    add_help=False)\n\n\nhelper_args = parser.add_argument_group(title=\"Help\")\nhelper_args.add_argument(\n    \"-h\", \"--help\",\n    action=\"help\",\n    help=\"Show this help message and exit\"\n)\nhelper_args.add_argument(\n    \"--list-supported-alleles\",\n    action=\"store_true\",\n    default=False,\n    help=\"Print the list of supported alleles and exit\"\n)\nhelper_args.add_argument(\n    \"--list-supported-peptide-lengths\",\n    action=\"store_true\",\n    default=False,\n    help=\"Print the list of supported peptide lengths and exit\"\n)\nhelper_args.add_argument(\n    \"--version\",\n    action=\"version\",\n    version=\"mhcflurry %s\" % __version__,\n)\n\ninput_args = parser.add_argument_group(title=\"Input options\")\ninput_args.add_argument(\n    \"input\",\n    metavar=\"INPUT\",\n    nargs=\"?\",\n    help=\"Input CSV or FASTA\")\ninput_args.add_argument(\n    \"--input-format\",\n    choices=(\"guess\", \"csv\", \"fasta\"),\n    default=\"guess\",\n    help=\"Format of input file. By default, it is guessed from the file \"\n         \"extension.\")\ninput_args.add_argument(\n    \"--alleles\",\n    metavar=\"ALLELE\",\n    nargs=\"+\",\n    help=\"Alleles to predict\")\ninput_args.add_argument(\n    \"--sequences\",\n    metavar=\"SEQ\",\n    nargs=\"+\",\n    help=\"Sequences to predict (exclusive with passing an input file)\")\ninput_args.add_argument(\n    \"--sequence-id-column\",\n    metavar=\"NAME\",\n    default=\"sequence_id\",\n    help=\"Input CSV column name for sequence IDs. Default: '%(default)s'\")\ninput_args.add_argument(\n    \"--sequence-column\",\n    metavar=\"NAME\",\n    default=\"sequence\",\n    help=\"Input CSV column name for sequences. 
Default: '%(default)s'\")\ninput_args.add_argument(\n    \"--no-throw\",\n    action=\"store_true\",\n    default=False,\n    help=\"Return NaNs for unsupported alleles or peptides instead of raising\")\n\nresults_args = parser.add_argument_group(title=\"Result options\")\nresults_args.add_argument(\n    \"--peptide-lengths\",\n    default=\"8-11\",\n    metavar=\"L\",\n    help=\"Peptide lengths to consider. Pass as START-END (e.g. 8-11) or a \"\n    \"comma-separated list (8,9,10,11). When using START-END, the range is \"\n    \"INCLUSIVE on both ends. Default: %(default)s.\")\ndefault_thresholds = {\n    \"presentation_score\": 0.7,\n    \"processing_score\": 0.5,\n    \"affinity\": 500,\n    \"affinity_percentile\": 2.0,\n}\nresults_args.add_argument(\n    \"--results-all\",\n    action=\"store_true\",\n    default=False,\n    help=\"Return results for all peptides regardless of affinity, etc.\")\nresults_args.add_argument(\n    \"--threshold-presentation-score\",\n    type=float,\n    help=f\"Threshold if filtering by presentation score. Default: > {default_thresholds['presentation_score']}\")\nresults_args.add_argument(\n    \"--threshold-processing-score\",\n    type=float,\n    help=f\"Threshold if filtering by processing score. Default: > {default_thresholds['processing_score']}\")\nresults_args.add_argument(\n    \"--threshold-affinity\",\n    type=float,\n    help=f\"Threshold if filtering by affinity. Default: < {default_thresholds['affinity']}\")\nresults_args.add_argument(\n    \"--threshold-affinity-percentile\",\n    type=float,\n    help=f\"Threshold if filtering by affinity percentile. Default: < {default_thresholds['affinity_percentile']}\")\n\n\noutput_args = parser.add_argument_group(title=\"Output options\")\noutput_args.add_argument(\n    \"--out\",\n    metavar=\"OUTPUT.csv\",\n    help=\"Output CSV\")\noutput_args.add_argument(\n    \"--output-delimiter\",\n    metavar=\"CHAR\",\n    default=\",\",\n    help=\"Delimiter character for results. 
Default: '%(default)s'\")\noutput_args.add_argument(\n    \"--no-affinity-percentile\",\n    default=False,\n    action=\"store_true\",\n    help=\"Do not include affinity percentile rank\")\n\nmodel_args = parser.add_argument_group(title=\"Model options\")\nmodel_args.add_argument(\n    \"--models\",\n    metavar=\"DIR\",\n    default=None,\n    help=\"Directory containing presentation models.\"\n    \"Default: %s\" % get_default_class1_presentation_models_dir(\n        test_exists=False))\nmodel_args.add_argument(\n    \"--no-flanking\",\n    action=\"store_true\",\n    default=False,\n    help=\"Do not use flanking sequence information in predictions\")\n\n\ndef parse_peptide_lengths(value):\n    try:\n        if \"-\" in value:\n            (start, end) = value.split(\"-\", 2)\n            start = int(start.strip())\n            end = int(end.strip())\n            peptide_lengths = list(range(start, end + 1))\n        else:\n            peptide_lengths = [\n                int(length.strip())\n                for length in value.split(\",\")\n            ]\n    except ValueError:\n        raise ValueError(\"Couldn't parse peptide lengths: \", value)\n    return peptide_lengths\n\n\ndef run(argv=sys.argv[1:]):\n    if not argv:\n        parser.print_help()\n        parser.exit(1)\n\n    args = parser.parse_args(argv)\n\n    # It's hard to pass a tab in a shell, so we correct a common error:\n    if args.output_delimiter == \"\\\\t\":\n        args.output_delimiter = \"\\t\"\n\n    peptide_lengths = parse_peptide_lengths(args.peptide_lengths)\n\n    threshold_args = [\n        args.threshold_presentation_score,\n        args.threshold_processing_score,\n        args.threshold_affinity,\n        args.threshold_affinity_percentile,\n    ]\n    if not args.results_all and all(x is None for x in threshold_args):\n        print(\"Filtering by affinity-percentile < %s\" % default_thresholds[\"affinity_percentile\"])\n        print(\"to show all predictions, pass --results-all\")\n        args.threshold_affinity_percentile = default_thresholds[\"affinity_percentile\"]\n\n    models_dir = args.models\n    if models_dir is None:\n        # The reason we set the default here instead of in the argument parser\n        # is that we want to test_exists at this point, so the user gets a\n        # message instructing them to download the models if needed.\n        models_dir = get_default_class1_presentation_models_dir(test_exists=True)\n\n    predictor = Class1PresentationPredictor.load(models_dir)\n\n    if args.list_supported_alleles:\n        print(\"\\n\".join(predictor.supported_alleles))\n        return\n\n    if args.list_supported_peptide_lengths:\n        min_len, max_len = predictor.supported_peptide_lengths\n        print(\"\\n\".join([str(length) for length in range(min_len, max_len + 1)]))\n        return\n\n    if args.input:\n        if args.sequences:\n            parser.error(\n                \"If an input file is specified, do not specify --sequences\")\n\n        input_format = args.input_format\n        if input_format == \"guess\":\n            extension = args.input.lower().split(\".\")[-1]\n            if extension in [\"gz\", \"bzip2\"]:\n                extension = args.input.lower().split(\".\")[-2]\n\n            if extension == \"csv\":\n                input_format = \"csv\"\n            elif extension in [\"fasta\", \"fa\"]:\n                input_format = \"fasta\"\n            else:\n                parser.error(\n                    \"Couldn't guess input format from 
file extension: %s\\n\"\n                    \"Pass the --input-format argument to specify if it is a \"\n                    \"CSV or fasta file\" % args.input)\n            print(\"Guessed input file format:\", input_format)\n\n        if input_format == \"csv\":\n            df = pandas.read_csv(args.input)\n            print(\"Read input CSV with %d rows, columns are: %s\" % (\n                len(df), \", \".join(df.columns)))\n            for col in [args.sequence_column,]:\n                if col not in df.columns:\n                    raise ValueError(\n                        \"No such column '%s' in CSV. Columns are: %s\" % (\n                            col, \", \".join([\"'%s'\" % c for c in df.columns])))\n\n        elif input_format == \"fasta\":\n            df = read_fasta_to_dataframe(args.input)\n            print(\"Read input fasta with %d sequences\" % len(df))\n            print(df)\n        else:\n            raise ValueError(\"Unsupported input format\", input_format)\n    else:\n        if not args.sequences:\n            parser.error(\n                \"Specify either an input file or the --sequences argument\")\n\n        df = pandas.DataFrame({\n            args.sequence_column: args.sequences,\n        })\n\n    if args.sequence_id_column not in df:\n        df[args.sequence_id_column] = \"sequence_\" + df.index.astype(str)\n\n    df = df.set_index(args.sequence_id_column)\n\n    if args.alleles:\n        genotypes = pandas.Series(args.alleles).str.split(r\"[,\\s]+\")\n        genotypes.index = genotypes.index.map(lambda i: \"genotype_%02d\" % i)\n        alleles = genotypes.to_dict()\n    else:\n        print(\"No alleles specified. Will perform processing prediction only.\")\n        alleles = {}\n\n    result_df = predictor.predict_sequences(\n        sequences=df[args.sequence_column].to_dict(),\n        alleles=alleles,\n        result=\"all\",\n        peptide_lengths=peptide_lengths,\n        use_flanks=not args.no_flanking,\n        include_affinity_percentile=not args.no_affinity_percentile,\n        throw=not args.no_throw)\n\n    # Apply thresholds\n    if args.threshold_presentation_score is not None:\n        result_df = result_df.loc[result_df.presentation_score >= args.threshold_presentation_score]\n\n    if args.threshold_processing_score is not None:\n        result_df = result_df.loc[result_df.processing_score >= args.threshold_processing_score]\n\n    if args.threshold_affinity is not None:\n        result_df = result_df.loc[result_df.affinity <= args.threshold_affinity]\n\n    if args.threshold_affinity_percentile is not None:\n        result_df = result_df.loc[result_df.affinity_percentile <= args.threshold_affinity_percentile]\n\n    # Write results\n    if args.out:\n        result_df.to_csv(args.out, index=False, sep=args.output_delimiter)\n        print(\"Wrote: %s\" % args.out)\n    else:\n        result_df.to_csv(sys.stdout, index=False, sep=args.output_delimiter)\n"
  },
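`parse_peptide_lengths` accepts either form documented for `--peptide-lengths`, with the range form inclusive on both ends. A quick behavior check, assuming the module above imports cleanly as `mhcflurry.predict_scan_command`:

```python
from mhcflurry.predict_scan_command import parse_peptide_lengths

print(parse_peptide_lengths("8-11"))  # [8, 9, 10, 11] -- inclusive range
print(parse_peptide_lengths("8,10"))  # [8, 10]        -- explicit list
```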
  {
    "path": "mhcflurry/pytorch_layers.py",
    "content": "\"\"\"\nPyTorch custom layers for mhcflurry.\n\"\"\"\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\ndef get_activation(name):\n    \"\"\"\n    Map activation name string to a PyTorch activation function.\n\n    Parameters\n    ----------\n    name : str\n        Activation name: \"tanh\", \"sigmoid\", \"relu\", \"linear\", or \"\"\n\n    Returns\n    -------\n    callable or None\n        Activation function, or None for no activation\n    \"\"\"\n    if not name or name == \"linear\":\n        return None\n    name = name.lower()\n    if name == \"tanh\":\n        return torch.tanh\n    elif name == \"sigmoid\":\n        return torch.sigmoid\n    elif name == \"relu\":\n        return F.relu\n    else:\n        raise ValueError(f\"Unknown activation: {name}\")\n\n\nclass LocallyConnected1D(nn.Module):\n    \"\"\"\n    A locally connected 1D layer (unshared convolution).\n\n    Unlike Conv1D, this layer uses different filter weights at each position\n    in the input sequence. This is equivalent to Keras' LocallyConnected1D.\n\n    Parameters\n    ----------\n    in_channels : int\n        Number of input channels\n    out_channels : int\n        Number of output channels (filters)\n    input_length : int\n        Length of the input sequence\n    kernel_size : int\n        Size of the convolution kernel\n    activation : str\n        Activation function name\n    \"\"\"\n\n    def __init__(self, in_channels, out_channels, input_length, kernel_size,\n                 activation=\"tanh\"):\n        super(LocallyConnected1D, self).__init__()\n\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.input_length = input_length\n        self.kernel_size = kernel_size\n        self.activation_name = activation\n        self.output_length = input_length - kernel_size + 1\n\n        # Weight shape: (output_length, out_channels, in_channels * kernel_size)\n        self.weight = nn.Parameter(\n            torch.randn(self.output_length, out_channels, in_channels * kernel_size)\n        )\n        # Bias shape: (output_length, out_channels)\n        self.bias = nn.Parameter(\n            torch.zeros(self.output_length, out_channels)\n        )\n\n        self._activation = get_activation(activation)\n\n        # Initialize weights\n        nn.init.xavier_uniform_(self.weight)\n\n    def forward(self, x):\n        \"\"\"\n        Forward pass.\n\n        Parameters\n        ----------\n        x : torch.Tensor\n            Input tensor of shape (batch, sequence_length, in_channels)\n\n        Returns\n        -------\n        torch.Tensor\n            Output tensor of shape (batch, output_length, out_channels)\n        \"\"\"\n        batch_size = x.size(0)\n\n        # Use unfold to extract patches and match Keras flatten order.\n        # x_unfolded shape: (batch, output_length, in_channels, kernel_size)\n        x_unfolded = x.unfold(1, self.kernel_size, 1)\n        # Keras flattens patches with kernel positions first, then channels.\n        x_unfolded = x_unfolded.permute(0, 1, 3, 2)\n        # Reshape to (batch, output_length, kernel_size * in_channels)\n        x_unfolded = x_unfolded.reshape(\n            batch_size, self.output_length, self.kernel_size * self.in_channels\n        )\n\n        # Apply locally connected weights via einsum\n        # x_unfolded: (batch, output_length, in_channels * kernel_size)\n        # weight: (output_length, out_channels, in_channels * kernel_size)\n        # result: (batch, 
output_length, out_channels)\n        output = torch.einsum('boi,ofi->bof', x_unfolded, self.weight) + self.bias\n\n        if self._activation is not None:\n            output = self._activation(output)\n\n        return output\n"
  },
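A shape sketch for `LocallyConnected1D` (requires torch): with input length 15 and kernel size 3 there are 15 - 3 + 1 = 13 output positions, and unlike a shared-weight `Conv1d`, each position carries its own weight block.

```python
import torch
from mhcflurry.pytorch_layers import LocallyConnected1D

layer = LocallyConnected1D(
    in_channels=21, out_channels=8, input_length=15, kernel_size=3)

x = torch.randn(4, 15, 21)  # (batch, sequence_length, in_channels)
print(layer(x).shape)       # torch.Size([4, 13, 8])

# One unshared weight block per output position:
# (output_length, out_channels, in_channels * kernel_size)
print(layer.weight.shape)   # torch.Size([13, 8, 63])
```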
  {
    "path": "mhcflurry/pytorch_losses.py",
    "content": "\"\"\"\nPyTorch loss functions for mhcflurry.\n\nSupports inequality constraints where training data includes (=), (<), and (>)\nrelationships. For inequality constraints, penalization is applied only when\npredictions violate the constraint.\n\"\"\"\nimport numpy\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\nclass MSEWithInequalities(nn.Module):\n    \"\"\"\n    MSE loss with inequality support.\n\n    y_true is encoded as follows:\n      - [0, 1]: equality constraint, standard MSE\n      - [2, 3]: greater-than constraint (value = y_true - 2), penalize if pred < value\n      - [4, 5]: less-than constraint (value = y_true - 4), penalize if pred > value\n    \"\"\"\n    supports_inequalities = True\n    supports_multiple_outputs = False\n\n    @staticmethod\n    def encode_y(y, inequalities=None):\n        \"\"\"\n        Encode targets with inequality information.\n\n        Parameters\n        ----------\n        y : array-like\n            Target values in [0, 1]\n        inequalities : array-like of str, optional\n            One of \"=\", \">\", \"<\" for each target\n\n        Returns\n        -------\n        numpy.ndarray\n        \"\"\"\n        y = numpy.array(y, dtype=numpy.float32)\n        if numpy.isnan(y).any():\n            raise ValueError(\"y contains NaN\")\n        if (y < 0).any() or (y > 1).any():\n            raise ValueError(\"Targets must be in [0, 1] for MSEWithInequalities\")\n        if inequalities is None:\n            return y\n        if len(inequalities) != len(y):\n            raise ValueError(\"inequalities must have same length as y\")\n        for ineq in inequalities:\n            if ineq not in {\"=\", \">\", \"<\"}:\n                raise ValueError(\"Inequalities must be one of '=', '>', '<'\")\n        offsets = numpy.array([\n            {'=': 0, '>': 2, '<': 4}[ineq] for ineq in inequalities\n        ], dtype=numpy.float32)\n        encoded = y + offsets\n        assert not numpy.isnan(encoded).any()\n        return encoded\n\n    def forward(self, y_pred, y_true, sample_weights=None):\n        \"\"\"\n        Compute loss.\n\n        Parameters\n        ----------\n        y_pred : torch.Tensor\n            Predictions, shape (batch,) or (batch, 1)\n        y_true : torch.Tensor\n            Encoded targets, shape (batch,) or (batch, 1)\n\n        Returns\n        -------\n        torch.Tensor\n            Scalar loss value\n        \"\"\"\n        y_true = y_true.reshape(-1)\n        y_pred = y_pred.reshape(-1)\n\n        # Handle (=) inequalities: 0 <= y_true <= 1\n        diff1 = y_pred - y_true\n        diff1 = diff1 * (y_true >= 0.0).float() * (y_true <= 1.0).float()\n\n        # Handle (>) inequalities: 2 <= y_true <= 3\n        # Penalize only if pred < threshold (diff < 0)\n        diff2 = y_pred - (y_true - 2.0)\n        diff2 = diff2 * (y_true >= 2.0).float() * (y_true <= 3.0).float()\n        diff2 = diff2 * (diff2 < 0.0).float()\n\n        # Handle (<) inequalities: y_true >= 4\n        # Penalize only if pred > threshold (diff > 0)\n        diff3 = y_pred - (y_true - 4.0)\n        diff3 = diff3 * (y_true >= 4.0).float()\n        diff3 = diff3 * (diff3 > 0.0).float()\n\n        per_sample = diff1.square() + diff2.square() + diff3.square()\n        if sample_weights is None:\n            denominator = torch.clamp(\n                (y_true != 2.0).float().sum(), min=1.0\n            )\n            return per_sample.sum() / denominator\n        sample_weights = 
sample_weights.reshape(-1).to(per_sample.device)\n        mask = (y_true != 2.0).float()\n        denominator = torch.clamp((sample_weights * mask).sum(), min=1.0)\n        return (per_sample * sample_weights).sum() / denominator\n\n\nclass MSEWithInequalitiesAndMultipleOutputs(nn.Module):\n    \"\"\"\n    MSE loss with inequality and multiple output support.\n\n    Extends MSEWithInequalities by encoding the output index into the target:\n    encoded_target = inequality_encoded_value + output_index * 10\n    \"\"\"\n    supports_inequalities = True\n    supports_multiple_outputs = True\n\n    @staticmethod\n    def encode_y(y, inequalities=None, output_indices=None):\n        \"\"\"\n        Encode targets with inequality and output index information.\n\n        Parameters\n        ----------\n        y : array-like\n            Target values in [0, 1]\n        inequalities : array-like of str, optional\n            One of \"=\", \">\", \"<\" for each target\n        output_indices : array-like of int, optional\n            Output index for each target\n\n        Returns\n        -------\n        numpy.ndarray\n        \"\"\"\n        encoded = MSEWithInequalities.encode_y(y, inequalities)\n        if output_indices is not None:\n            output_indices = numpy.array(output_indices)\n            if output_indices.shape != (len(encoded),):\n                raise ValueError(\n                    \"Expected output_indices to have shape %s not %s\"\n                    % ((len(encoded),), output_indices.shape)\n                )\n            if (output_indices < 0).any():\n                raise ValueError(\"Invalid output indices: %s\" % output_indices)\n            encoded = encoded + output_indices.astype(numpy.float32) * 10\n        return encoded\n\n    def forward(self, y_pred, y_true, sample_weights=None):\n        \"\"\"\n        Compute loss.\n\n        Parameters\n        ----------\n        y_pred : torch.Tensor\n            Predictions, shape (batch, num_outputs)\n        y_true : torch.Tensor\n            Encoded targets, shape (batch,) or (batch, 1)\n\n        Returns\n        -------\n        torch.Tensor\n            Scalar loss value\n        \"\"\"\n        y_true = y_true.reshape(-1)\n        if y_pred.dim() == 1:\n            y_pred = y_pred.unsqueeze(1)\n\n        # Decode output indices\n        output_indices = (y_true / 10.0).long()\n        inequality_encoded = y_true - output_indices.float() * 10.0\n\n        # Select the relevant output for each sample\n        batch_indices = torch.arange(len(y_true), device=y_pred.device)\n        output_indices_clamped = output_indices.clamp(0, y_pred.shape[1] - 1)\n        selected_pred = y_pred[batch_indices, output_indices_clamped]\n\n        # Apply MSEWithInequalities logic on selected predictions\n        y_t = inequality_encoded\n        y_p = selected_pred\n\n        # Handle (=) inequalities\n        diff1 = y_p - y_t\n        diff1 = diff1 * (y_t >= 0.0).float() * (y_t <= 1.0).float()\n\n        # Handle (>) inequalities\n        diff2 = y_p - (y_t - 2.0)\n        diff2 = diff2 * (y_t >= 2.0).float() * (y_t <= 3.0).float()\n        diff2 = diff2 * (diff2 < 0.0).float()\n\n        # Handle (<) inequalities\n        diff3 = y_p - (y_t - 4.0)\n        diff3 = diff3 * (y_t >= 4.0).float()\n        diff3 = diff3 * (diff3 > 0.0).float()\n\n        per_sample = diff1.square() + diff2.square() + diff3.square()\n        if sample_weights is None:\n            denominator = torch.clamp(\n                (y_t != 
2.0).float().sum(), min=1.0\n            )\n            return per_sample.sum() / denominator\n        sample_weights = sample_weights.reshape(-1).to(per_sample.device)\n        mask = (y_t != 2.0).float()\n        denominator = torch.clamp((sample_weights * mask).sum(), min=1.0)\n        return (per_sample * sample_weights).sum() / denominator\n\n\nclass MultiallelicMassSpecLoss(nn.Module):\n    \"\"\"\n    Loss function for multiallelic mass spectrometry data.\n\n    For each (hit, decoy) pair, penalizes when any decoy allele prediction\n    exceeds the best hit allele prediction by more than delta.\n\n    y_true encoding:\n      - 1.0: hit (positive)\n      - 0.0: decoy (negative)\n      - -1.0: ignored\n    \"\"\"\n    supports_inequalities = True\n    supports_multiple_outputs = False\n\n    def __init__(self, delta=0.2, multiplier=1.0):\n        super(MultiallelicMassSpecLoss, self).__init__()\n        self.delta = delta\n        self.multiplier = multiplier\n\n    @staticmethod\n    def encode_y(y):\n        \"\"\"Encode y (no-op for this loss).\"\"\"\n        y = numpy.array(y, dtype=numpy.float32)\n        assert numpy.isin(y, [-1.0, 0.0, 1.0]).all()\n        return y\n\n    def forward(self, y_pred, y_true, sample_weights=None):\n        \"\"\"\n        Compute loss.\n\n        Parameters\n        ----------\n        y_pred : torch.Tensor\n            Predictions, shape (batch, num_alleles)\n        y_true : torch.Tensor\n            Labels, shape (batch,) or (batch, 1)\n\n        Returns\n        -------\n        torch.Tensor\n            Scalar loss value\n        \"\"\"\n        y_true = y_true.reshape(-1)\n\n        if y_pred.dim() == 1:\n            y_pred = y_pred.unsqueeze(1)\n\n        # Get hit and decoy masks\n        hit_mask = (y_true == 1.0)\n        decoy_mask = (y_true == 0.0)\n\n        num_hits = hit_mask.sum().item()\n        num_decoys = decoy_mask.sum().item()\n\n        if num_hits == 0 or num_decoys == 0:\n            return torch.tensor(0.0, device=y_pred.device, requires_grad=True)\n\n        num_alleles = y_pred.shape[1]\n\n        # Best allele prediction for each hit: (num_hits,)\n        hit_preds = y_pred[hit_mask]\n        hit_max = hit_preds.max(dim=1).values  # (num_hits,)\n\n        # All decoy predictions: (num_decoys, num_alleles)\n        decoy_preds = y_pred[decoy_mask]\n\n        # Compute pairwise terms:\n        # For each (decoy, allele, hit): max(0, decoy_pred - hit_max + delta)^2\n        # decoy_preds: (num_decoys, num_alleles) -> (num_decoys, num_alleles, 1)\n        # hit_max: (num_hits,) -> (1, 1, num_hits)\n        term = decoy_preds.unsqueeze(2) - hit_max.unsqueeze(0).unsqueeze(0) + self.delta\n        penalty = torch.clamp(term, min=0.0).square()\n\n        denominator = num_hits * num_decoys * num_alleles\n        result = self.multiplier * penalty.sum() / denominator\n\n        return result\n\n\nclass StandardLoss(nn.Module):\n    \"\"\"\n    Wrapper for standard PyTorch loss functions (MSE, MAE, etc).\n    \"\"\"\n    supports_inequalities = False\n    supports_multiple_outputs = False\n\n    def __init__(self, loss_name=\"mse\"):\n        super(StandardLoss, self).__init__()\n        self.loss_name = loss_name\n        if loss_name == \"mse\":\n            self._loss_fn = nn.MSELoss()\n        elif loss_name == \"mae\":\n            self._loss_fn = nn.L1Loss()\n        else:\n            raise ValueError(f\"Unknown standard loss: {loss_name}\")\n\n    @staticmethod\n    def encode_y(y):\n        \"\"\"Encode y (simple cast to 
float32).\"\"\"\n        return numpy.array(y, dtype=numpy.float32)\n\n    def forward(self, y_pred, y_true, sample_weights=None):\n        \"\"\"\n        Compute loss.\n\n        Parameters\n        ----------\n        y_pred : torch.Tensor\n        y_true : torch.Tensor\n        sample_weights : torch.Tensor | None\n            Optional per-example weights.\n\n        Returns\n        -------\n        torch.Tensor\n            Scalar loss value\n        \"\"\"\n        y_pred = y_pred.reshape(-1)\n        y_true = y_true.reshape(-1)\n\n        if sample_weights is None:\n            return self._loss_fn(y_pred, y_true)\n        if self.loss_name == \"mse\":\n            losses = F.mse_loss(y_pred, y_true, reduction=\"none\")\n        elif self.loss_name == \"mae\":\n            losses = F.l1_loss(y_pred, y_true, reduction=\"none\")\n        else:\n            losses = self._loss_fn(y_pred, y_true)\n        sample_weights = sample_weights.reshape(-1).to(losses.device)\n        denominator = torch.clamp(sample_weights.sum(), min=1.0)\n        return (losses * sample_weights).sum() / denominator\n\n\n# Registry of custom losses\n_CUSTOM_LOSSES = {\n    'mse_with_inequalities': MSEWithInequalities,\n    'mse_with_inequalities_and_multiple_outputs': MSEWithInequalitiesAndMultipleOutputs,\n    'multiallelic_mass_spec_loss': MultiallelicMassSpecLoss,\n}\n\n\ndef get_pytorch_loss(name):\n    \"\"\"\n    Get a PyTorch loss object by name.\n\n    Parameters\n    ----------\n    name : str\n        Loss name. Prefix with \"custom:\" for custom losses,\n        otherwise a standard loss name like \"mse\".\n\n    Returns\n    -------\n    nn.Module\n        Loss module with encode_y, supports_inequalities,\n        and supports_multiple_outputs attributes.\n    \"\"\"\n    if name.startswith(\"custom:\"):\n        custom_name = name.replace(\"custom:\", \"\")\n        if custom_name not in _CUSTOM_LOSSES:\n            raise ValueError(\n                f\"No such custom loss: {name}. \"\n                f\"Supported: {', '.join('custom:' + k for k in _CUSTOM_LOSSES)}\"\n            )\n        return _CUSTOM_LOSSES[custom_name]()\n    return StandardLoss(name)\n"
  },
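The inequality encoding is compact enough to show end to end. A small sketch, assuming torch is available: a ">" target of 0.5 is stored as 2.5 and a "<" target of 0.2 as 4.2, and satisfied inequalities contribute zero loss.

```python
import torch
from mhcflurry.pytorch_losses import MSEWithInequalities

encoded = MSEWithInequalities.encode_y(
    [0.9, 0.5, 0.2], inequalities=["=", ">", "<"])
print(encoded)  # [0.9 2.5 4.2]

loss_fn = MSEWithInequalities()
# The predictions satisfy the ">" (0.8 > 0.5) and "<" (0.1 < 0.2)
# constraints, so only the "=" residual (0.7 vs 0.9) contributes.
y_pred = torch.tensor([0.7, 0.8, 0.1])
print(loss_fn(y_pred, torch.tensor(encoded)))  # (0.2**2) / 3 ≈ 0.0133
```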
  {
    "path": "mhcflurry/random_negative_peptides.py",
    "content": "import logging\nimport math\n\nimport numpy\nimport pandas\n\nfrom .hyperparameters import HyperparameterDefaults\nfrom .common import amino_acid_distribution, random_peptides\n\n\nclass RandomNegativePeptides(object):\n    \"\"\"\n    Generate random negative (peptide, allele) pairs. These are used during\n    model training, where they are resampled at each epoch.\n    \"\"\"\n\n    hyperparameter_defaults = HyperparameterDefaults(\n        random_negative_rate=0.0,\n        random_negative_constant=0,\n        random_negative_match_distribution=True,\n        random_negative_distribution_smoothing=0.0,\n        random_negative_method=\"recommended\",\n        random_negative_binder_threshold=None,\n        random_negative_lengths=[8,9,10,11,12,13,14,15])\n    \"\"\"\n    Hyperperameters for random negative peptides.\n\n    Number of random negatives will be:\n        random_negative_rate * (num measurements) + random_negative_constant\n\n    where the exact meaning of (num measurements) depends on the particular\n    random_negative_method in use.\n\n    If random_negative_match_distribution is True, then the amino acid\n    frequencies of the training data peptides are used to generate the\n    random peptides.\n\n    Valid values for random_negative_method are:\n        \"by_length\": used for allele-specific prediction. See description in\n            `RandomNegativePeptides.plan_by_length` method.\n        \"by_allele\": used for pan-allele prediction. See\n            `RandomNegativePeptides.plan_by_allele` method.\n        \"by_allele_equalize_nonbinders\": used for pan-allele prediction. See\n            `RandomNegativePeptides.plan_by_allele_equalize_nonbinders` method.\n        \"recommended\": the default. Use by_length if the predictor is allele-\n            specific and by_allele if it's pan-allele.\n\n    \"\"\"\n\n    def __init__(self, **hyperparameters):\n        self.hyperparameters = self.hyperparameter_defaults.with_defaults(\n            hyperparameters)\n        self.plan_df = None\n        self.aa_distribution = None\n\n    def plan(self, peptides, affinities, alleles=None, inequalities=None):\n        \"\"\"\n        Calculate the number of random negatives for each allele and peptide\n        length. 
Call this once after instantiating the object.\n\n        Parameters\n        ----------\n        peptides : list of string\n        affinities : list of float\n        alleles : list of string, optional\n        inequalities : list of string (\">\", \"<\", or \"=\"), optional\n\n        Returns\n        -------\n        pandas.DataFrame indicating number of random negatives for each length\n        and allele.\n        \"\"\"\n        numpy.testing.assert_equal(len(peptides), len(affinities))\n        if alleles is not None:\n            numpy.testing.assert_equal(len(peptides), len(alleles))\n        if inequalities is not None:\n            numpy.testing.assert_equal(len(peptides), len(inequalities))\n\n        peptides = pandas.Series(peptides, copy=False)\n        peptide_lengths = peptides.str.len()\n\n        if self.hyperparameters['random_negative_match_distribution']:\n            self.aa_distribution = amino_acid_distribution(\n                peptides.values,\n                smoothing=self.hyperparameters[\n                    'random_negative_distribution_smoothing'\n                ])\n            logging.info(\n                \"Using amino acid distribution for random negative:\\n%s\" % (\n                    str(self.aa_distribution.to_dict())))\n\n        df_all = pandas.DataFrame({\n            'length': peptide_lengths,\n            'affinity': affinities,\n        })\n        df_all[\"allele\"] = \"\" if alleles is None else alleles\n        df_all[\"inequality\"] = \"=\" if inequalities is None else inequalities\n\n        df_binders = None\n        df_nonbinders = None\n        if self.hyperparameters['random_negative_binder_threshold']:\n            df_nonbinders = df_all.loc[\n                (df_all.inequality != \"<\") &\n                (df_all.affinity > self.hyperparameters[\n                    'random_negative_binder_threshold'\n                ])\n            ]\n            df_binders = df_all.loc[\n                (df_all.inequality != \">\") &\n                (df_all.affinity <= self.hyperparameters[\n                    'random_negative_binder_threshold'\n                ])\n            ]\n\n        method = self.hyperparameters['random_negative_method']\n        if method == 'recommended':\n            # by_length for allele-specific prediction and by_allele for pan.\n            method = (\n                \"by_length\"\n                if alleles is None else\n                \"by_allele\")\n\n        function = {\n            'by_length': self.plan_by_length,\n            'by_allele': self.plan_by_allele,\n            'by_allele_equalize_nonbinders':\n                self.plan_by_allele_equalize_nonbinders,\n        }[method]\n        function(df_all, df_binders, df_nonbinders)\n        assert self.plan_df is not None\n        logging.info(\"Random negative plan [%s]:\\n%s\", method, self.plan_df)\n        return self.plan_df\n\n    def plan_by_length(self, df_all, df_binders=None, df_nonbinders=None):\n        \"\"\"\n        Generate a random negative plan using the \"by_length\" policy.\n\n        Parameters are as in the `plan` method. No return value.\n\n        Used for allele-specific predictors. Does not work well for pan-allele.\n\n        Different numbers of random negatives per length. 
Alleles are sampled\n        proportionally to the number of times they are used in the training\n        data.\n        \"\"\"\n        assert list(df_all.allele.unique()) == [\"\"], (\n            \"by_length only recommended for allele specific prediction\")\n\n        df = df_all if df_binders is None else df_binders\n        lengths = self.hyperparameters['random_negative_lengths']\n\n        length_to_num_random_negative = {}\n        length_counts = df.length.value_counts().to_dict()\n        for length in lengths:\n            length_to_num_random_negative[length] = int(\n                length_counts.get(length, 0) *\n                self.hyperparameters['random_negative_rate'] +\n                self.hyperparameters['random_negative_constant'])\n\n        plan_df = pandas.DataFrame(index=sorted(df.allele.unique()))\n        for length in lengths:\n            plan_df[length] = length_to_num_random_negative[length]\n        self.plan_df = plan_df.astype(int)\n\n    def plan_by_allele(self, df_all, df_binders=None, df_nonbinders=None):\n        \"\"\"\n        Generate a random negative plan using the \"by_allele\" policy.\n\n        Parameters are as in the `plan` method. No return value.\n\n        For each allele, a particular number of random negatives are used\n        for all lengths. Across alleles, the number of random negatives\n        varies; within an allele, the number of random negatives for each\n        length is a constant\n        \"\"\"\n        allele_to_num_per_length = {}\n        total_random_peptides_per_length = 0\n        df = df_all if df_binders is None else df_binders\n        lengths = self.hyperparameters['random_negative_lengths']\n        all_alleles = df_all.allele.unique()\n        for allele in all_alleles:\n            sub_df = df.loc[df.allele == allele]\n            num_for_allele = len(sub_df) * (\n                self.hyperparameters['random_negative_rate']\n            ) + self.hyperparameters['random_negative_constant']\n            num_per_length = int(math.ceil(\n                num_for_allele / len(lengths)))\n            total_random_peptides_per_length += num_per_length\n            allele_to_num_per_length[allele] = num_per_length\n\n        plan_df = pandas.DataFrame(index=sorted(df.allele.unique()))\n        for length in lengths:\n            plan_df[length] = plan_df.index.map(allele_to_num_per_length)\n        self.plan_df = plan_df.astype(int)\n\n    def plan_by_allele_equalize_nonbinders(\n            self, df_all, df_binders, df_nonbinders):\n        \"\"\"\n        Generate a random negative plan using the\n        \"by_allele_equalize_nonbinders\" policy.\n\n        Parameters are as in the `plan` method. No return value.\n\n        Requires that the random_negative_binder_threshold hyperparameter is set.\n\n        In a first step, the number of random negatives selected by the\n        \"by_allele\" method are added (see `plan_by_allele`). Then, the total\n        number of non-binders are calculated for each allele and length. This\n        total includes non-binder measurements in the training data plus the\n        random negative peptides added in the first step. 
In a second step,\n        additional random negative peptides are added so that for each allele,\n        all peptide lengths have the same total number of non-binders.\n        \"\"\"\n        assert df_binders is not None\n        assert df_nonbinders is not None\n\n        lengths = self.hyperparameters['random_negative_lengths']\n\n        self.plan_by_allele(df_all, df_binders, df_nonbinders)\n        first_pass_plan = self.plan_df\n        self.plan_df = None\n\n        # Use floating point while populating so NaN assignment remains valid\n        # across pandas versions; cast to int at the end.\n        new_plan = first_pass_plan.astype(float).copy()\n        new_plan[:] = numpy.nan\n\n        for (allele, first_pass_per_length) in first_pass_plan.iterrows():\n            real_nonbinders_by_length = df_nonbinders.loc[\n                df_nonbinders.allele == allele\n            ].length.value_counts().reindex(lengths).fillna(0)\n            total_nonbinders_by_length = (\n                real_nonbinders_by_length + first_pass_per_length)\n            new_plan.loc[allele] = first_pass_per_length + (\n                total_nonbinders_by_length.max() - total_nonbinders_by_length)\n\n        if new_plan.isna().any().any():\n            raise AssertionError(\n                \"Random negative plan contains NaN after equalization; \"\n                \"this indicates an incomplete per-allele assignment bug.\"\n            )\n\n        self.plan_df = new_plan.astype(int)\n\n    def get_alleles(self):\n        \"\"\"\n        Get the list of alleles corresponding to each random negative peptide\n        as returned by `get_peptides`. This does NOT change and can be safely\n        called once and reused.\n\n        Returns\n        -------\n        list of string\n        \"\"\"\n        assert self.plan_df is not None, \"Call plan() first\"\n        alleles = []\n        for allele, row in self.plan_df.iterrows():\n            alleles.extend([allele] * int(row.sum()))\n        assert len(alleles) == self.get_total_count()\n        return alleles\n\n    def get_peptides(self):\n        \"\"\"\n        Get the list of random negative peptides. This will be different each\n        time the method is called.\n\n        Returns\n        -------\n        list of string\n\n        \"\"\"\n        assert self.plan_df is not None, \"Call plan() first\"\n        peptides = []\n        for allele, row in self.plan_df.iterrows():\n            for (length, num) in row.items():\n                peptides.extend(\n                    random_peptides(\n                        num,\n                        length=length,\n                        distribution=self.aa_distribution))\n        assert len(peptides) == self.get_total_count()\n        return peptides\n\n    def get_total_count(self):\n        \"\"\"\n        Total number of planned random negative peptides.\n\n        Returns\n        -------\n        int\n        \"\"\"\n        return self.plan_df.sum().sum()\n"
  },
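  {
    "path": "examples/random_negative_plan_demo.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the mhcflurry package: a standalone\nrestatement of the \"by_allele\" random negative planning arithmetic in\nmhcflurry/random_negative_peptides.py above. Each allele receives\nceil((num_binders * rate + constant) / num_lengths) random negative\npeptides at every length. The hyperparameter names mirror the module;\nthe file path and binder counts here are hypothetical.\n\"\"\"\nimport math\n\nimport pandas\n\nHYPERPARAMETERS = {\n    \"random_negative_rate\": 0.2,\n    \"random_negative_constant\": 25,\n    \"random_negative_lengths\": [8, 9, 10, 11],\n}\n\n# Hypothetical number of binder measurements per allele.\nbinder_counts = {\"HLA-A*02:01\": 1000, \"HLA-B*57:01\": 40}\n\nlengths = HYPERPARAMETERS[\"random_negative_lengths\"]\nrows = {}\nfor (allele, num_binders) in binder_counts.items():\n    num_for_allele = (\n        num_binders * HYPERPARAMETERS[\"random_negative_rate\"] +\n        HYPERPARAMETERS[\"random_negative_constant\"])\n    # Same number of random negatives at every length, as in plan_by_allele.\n    num_per_length = int(math.ceil(num_for_allele / len(lengths)))\n    rows[allele] = dict((length, num_per_length) for length in lengths)\n\nplan_df = pandas.DataFrame(rows).T[lengths].astype(int)\nprint(plan_df)\n# HLA-A*02:01: ceil((1000 * 0.2 + 25) / 4) = 57 per length.\n# HLA-B*57:01: ceil((40 * 0.2 + 25) / 4) = 9 per length.\n"
  },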
  {
    "path": "mhcflurry/regression_target.py",
    "content": "import numpy\n\n\ndef from_ic50(ic50, max_ic50=50000.0):\n    \"\"\"\n    Convert ic50s to regression targets in the range [0.0, 1.0].\n\n    Parameters\n    ----------\n    ic50 : numpy.array of float\n\n    Returns\n    -------\n    numpy.array of float\n\n    \"\"\"\n    x = 1.0 - (numpy.log(numpy.maximum(ic50, 1e-12)) / numpy.log(max_ic50))\n    return numpy.minimum(\n        1.0,\n        numpy.maximum(0.0, x))\n\n\ndef to_ic50(x, max_ic50=50000.0):\n    \"\"\"\n    Convert regression targets in the range [0.0, 1.0] to ic50s in the range\n    [0, 50000.0].\n\n    Parameters\n    ----------\n    x : numpy.array of float\n\n    Returns\n    -------\n    numpy.array of float\n    \"\"\"\n    return max_ic50 ** (1.0 - x)\n"
  },
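  {
    "path": "examples/regression_target_demo.py",
    "content": "\"\"\"\nIllustrative usage sketch, not part of the mhcflurry package:\nexercises the from_ic50/to_ic50 transforms defined in\nmhcflurry/regression_target.py above. The mapping is monotone\ndecreasing, so strong binders (low nM) map near 1.0, and to_ic50\ninverts from_ic50 for in-range values. Assumes mhcflurry is installed;\nthe file path is hypothetical.\n\"\"\"\nimport numpy\n\nfrom mhcflurry.regression_target import from_ic50, to_ic50\n\nic50s = numpy.array([1.0, 100.0, 500.0, 5000.0, 50000.0])\ntargets = from_ic50(ic50s)\nprint(targets)           # approximately [1.0, 0.574, 0.426, 0.213, 0.0]\nprint(to_ic50(targets))  # recovers the original IC50s up to float error\n\n# Out-of-range affinities are clipped into [0.0, 1.0]:\nprint(from_ic50(numpy.array([0.5, 100000.0])))  # [1.0, 0.0]\n"
  },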
  {
    "path": "mhcflurry/scoring.py",
    "content": "\"\"\"\nMeasures of prediction accuracy\n\"\"\"\nimport logging\nimport sklearn.metrics\nimport numpy\nimport scipy\n\nfrom .regression_target import from_ic50\n\n\ndef make_scores(\n        ic50_y,\n        ic50_y_pred,\n        sample_weight=None,\n        threshold_nm=500,\n        max_ic50=50000):\n    \"\"\"\n    Calculate AUC, F1, and Kendall Tau scores.\n\n    Parameters\n    -----------\n    ic50_y : float list\n        true IC50s (i.e. affinities)\n\n    ic50_y_pred : float list\n        predicted IC50s\n\n    sample_weight : float list [optional]\n\n    threshold_nm : float [optional]\n\n    max_ic50 : float [optional]\n\n    Returns\n    -----------\n    dict with entries \"auc\", \"f1\", \"tau\"\n    \"\"\"\n\n    y_pred = from_ic50(ic50_y_pred, max_ic50)\n    try:\n        auc = sklearn.metrics.roc_auc_score(\n            ic50_y <= threshold_nm,\n            y_pred,\n            sample_weight=sample_weight)\n    except ValueError as e:\n        logging.warning(e)\n        auc = numpy.nan\n    try:\n        f1 = sklearn.metrics.f1_score(\n            ic50_y <= threshold_nm,\n            ic50_y_pred <= threshold_nm,\n            sample_weight=sample_weight)\n    except ValueError as e:\n        logging.warning(e)\n        f1 = numpy.nan\n    try:\n        tau = scipy.stats.kendalltau(ic50_y_pred, ic50_y)[0]\n    except ValueError as e:\n        logging.warning(e)\n        tau = numpy.nan\n\n    return dict(\n        auc=auc,\n        f1=f1,\n        tau=tau)\n"
  },
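  {
    "path": "examples/scoring_demo.py",
    "content": "\"\"\"\nIllustrative usage sketch, not part of the mhcflurry package: runs\nmake_scores() from mhcflurry/scoring.py above on a tiny made-up set of\nmeasured and predicted IC50s. With the default 500 nM threshold, the\nfirst two measurements count as binders for the AUC and F1 metrics.\nAssumes mhcflurry is installed; the file path and numbers are\nhypothetical.\n\"\"\"\nimport numpy\n\nfrom mhcflurry.scoring import make_scores\n\nic50_true = numpy.array([25.0, 400.0, 3000.0, 20000.0])\nic50_pred = numpy.array([60.0, 800.0, 1500.0, 30000.0])\n\nscores = make_scores(ic50_true, ic50_pred, threshold_nm=500)\nprint(scores)  # dict with entries \"auc\", \"f1\", and \"tau\"\n"
  },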
  {
    "path": "mhcflurry/select_allele_specific_models_command.py",
    "content": "\"\"\"\nModel select class1 single allele models.\n\"\"\"\nimport argparse\nimport os\nimport signal\nimport sys\nimport time\nimport traceback\nimport random\nfrom functools import partial\nfrom pprint import pprint\n\nimport numpy\nimport pandas\nfrom scipy.stats import kendalltau, percentileofscore, pearsonr\nfrom sklearn.metrics import roc_auc_score\n\n\nimport tqdm  # progress bar\n\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .common import normalize_allele_name\nfrom .encodable_sequences import EncodableSequences\nfrom .common import configure_logging, random_peptides\nfrom .local_parallelism import worker_pool_with_gpu_assignments_from_args, add_local_parallelism_args\nfrom .regression_target import from_ic50\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data\",\n    metavar=\"FILE.csv\",\n    required=False,\n    help=(\n        \"Model selection data CSV. Expected columns: \"\n        \"allele, peptide, measurement_value\"))\nparser.add_argument(\n    \"--exclude-data\",\n    metavar=\"FILE.csv\",\n    required=False,\n    help=(\n        \"Data to EXCLUDE from model selection. Useful to specify the original \"\n        \"training data used\"))\nparser.add_argument(\n    \"--models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to read models\")\nparser.add_argument(\n    \"--out-models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write selected models\")\nparser.add_argument(\n    \"--out-unselected-predictions\",\n    metavar=\"FILE.csv\",\n    help=\"Write predictions for validation data using unselected predictor to \"\n    \"FILE.csv\")\nparser.add_argument(\n    \"--unselected-accuracy-scorer\",\n    metavar=\"SCORER\",\n    default=\"combined:mass-spec,mse\")\nparser.add_argument(\n    \"--unselected-accuracy-scorer-num-samples\",\n    type=int,\n    default=1000)\nparser.add_argument(\n    \"--unselected-accuracy-percentile-threshold\",\n    type=float,\n    metavar=\"X\",\n    default=95)\nparser.add_argument(\n    \"--allele\",\n    default=None,\n    nargs=\"+\",\n    help=\"Alleles to select models for. If not specified, all alleles with \"\n    \"enough measurements will be used.\")\nparser.add_argument(\n    \"--combined-min-models\",\n    type=int,\n    default=8,\n    metavar=\"N\",\n    help=\"Min number of models to select per allele when using combined selector\")\nparser.add_argument(\n    \"--combined-max-models\",\n    type=int,\n    default=1000,\n    metavar=\"N\",\n    help=\"Max number of models to select per allele when using combined selector\")\nparser.add_argument(\n    \"--combined-min-contribution-percent\",\n    type=float,\n    default=1.0,\n    metavar=\"X\",\n    help=\"Use only model selectors that can contribute at least X %% to the \"\n    \"total score. 
Default: %(default)s\")\n\nparser.add_argument(\n    \"--mass-spec-min-measurements\",\n    type=int,\n    metavar=\"N\",\n    default=1,\n    help=\"Min number of measurements required for an allele to use mass-spec model \"\n    \"selection\")\nparser.add_argument(\n    \"--mass-spec-min-models\",\n    type=int,\n    default=8,\n    metavar=\"N\",\n    help=\"Min number of models to select per allele when using mass-spec selector\")\nparser.add_argument(\n    \"--mass-spec-max-models\",\n    type=int,\n    default=1000,\n    metavar=\"N\",\n    help=\"Max number of models to select per allele when using mass-spec selector\")\nparser.add_argument(\n    \"--mse-min-measurements\",\n    type=int,\n    metavar=\"N\",\n    default=1,\n    help=\"Min number of measurements required for an allele to use MSE model \"\n    \"selection\")\nparser.add_argument(\n    \"--mse-min-models\",\n    type=int,\n    default=8,\n    metavar=\"N\",\n    help=\"Min number of models to select per allele when using MSE selector\")\nparser.add_argument(\n    \"--mse-max-models\",\n    type=int,\n    default=1000,\n    metavar=\"N\",\n    help=\"Max number of models to select per allele when using MSE selector\")\nparser.add_argument(\n    \"--scoring\",\n    nargs=\"+\",\n    default=[\"mse\", \"consensus\"],\n    help=\"Scoring procedures to use in order\")\nparser.add_argument(\n    \"--consensus-min-models\",\n    type=int,\n    default=8,\n    metavar=\"N\",\n    help=\"Min number of models to select per allele when using consensus selector\")\nparser.add_argument(\n    \"--consensus-max-models\",\n    type=int,\n    default=1000,\n    metavar=\"N\",\n    help=\"Max number of models to select per allele when using consensus selector\")\nparser.add_argument(\n    \"--consensus-num-peptides-per-length\",\n    type=int,\n    default=10000,\n    help=\"Num peptides per length to use for consensus scoring\")\nparser.add_argument(\n    \"--mass-spec-regex\",\n    metavar=\"REGEX\",\n    default=\"mass[- ]spec\",\n    help=\"Regular expression for mass-spec data. Runs on measurement_source col.\"\n    \"Default: %(default)s.\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Verbosity. 
Default: %(default)s\",\n    default=0)\n\nadd_local_parallelism_args(parser)\n\n\ndef run(argv=sys.argv[1:]):\n    global GLOBAL_DATA\n\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    args.out_models_dir = os.path.abspath(args.out_models_dir)\n\n    configure_logging(verbose=args.verbosity > 1)\n\n    input_predictor = Class1AffinityPredictor.load(args.models_dir)\n    print(\"Loaded: %s\" % input_predictor)\n\n    if args.allele:\n        alleles = [normalize_allele_name(a) for a in args.allele]\n    else:\n        alleles = input_predictor.supported_alleles\n\n    metadata_dfs = {}\n    if args.data:\n        df = pandas.read_csv(args.data)\n        print(\"Loaded data: %s\" % (str(df.shape)))\n\n        df = df.loc[\n            (df.peptide.str.len() >= 8) & (df.peptide.str.len() <= 15)\n        ]\n        print(\"Subselected to 8-15mers: %s\" % (str(df.shape)))\n\n        # Allele names in data are assumed to be already normalized.\n        df = df.loc[df.allele.isin(alleles)].dropna()\n        print(\"Selected %d alleles: %s\" % (len(alleles), ' '.join(alleles)))\n\n        if args.exclude_data:\n            exclude_df = pandas.read_csv(args.exclude_data)\n            metadata_dfs[\"model_selection_exclude\"] = exclude_df\n            print(\"Loaded exclude data: %s\" % (str(df.shape)))\n\n            df[\"_key\"] = df.allele + \"__\" + df.peptide\n            exclude_df[\"_key\"] = exclude_df.allele + \"__\" + exclude_df.peptide\n            df[\"_excluded\"] = df._key.isin(exclude_df._key.unique())\n            print(\"Excluding measurements per allele (counts): \")\n            print(df.groupby(\"allele\")._excluded.sum())\n\n            print(\"Excluding measurements per allele (fractions): \")\n            print(df.groupby(\"allele\")._excluded.mean())\n\n            df = df.loc[~df._excluded]\n            del df[\"_excluded\"]\n            del df[\"_key\"]\n            print(\"Reduced data to: %s\" % (str(df.shape)))\n\n        metadata_dfs[\"model_selection_data\"] = df\n\n        df[\"mass_spec\"] = df.measurement_source.str.contains(\n            args.mass_spec_regex)\n    else:\n        df = None\n\n    if args.out_unselected_predictions:\n        df[\"unselected_prediction\"] = input_predictor.predict(\n            alleles=df.allele.values,\n            peptides=df.peptide.values)\n        df.to_csv(args.out_unselected_predictions)\n        print(\"Wrote: %s\" % args.out_unselected_predictions)\n\n    selectors = {}\n    selector_to_model_selection_kwargs = {}\n\n    def make_selector(\n            scoring,\n            combined_min_contribution_percent=args.combined_min_contribution_percent):\n        if scoring in selectors:\n            return (\n                selectors[scoring], selector_to_model_selection_kwargs[scoring])\n\n        start = time.time()\n        if scoring.startswith(\"combined:\"):\n            model_selection_kwargs = {\n                'min_models': args.combined_min_models,\n                'max_models': args.combined_max_models,\n            }\n            component_selectors = []\n            for component_selector in scoring.split(\":\", 1)[1].split(\",\"):\n                component_selectors.append(\n                    make_selector(\n                        component_selector)[0])\n            selector = CombinedModelSelector(\n                
component_selectors,\n                min_contribution_percent=combined_min_contribution_percent)\n        elif scoring == \"mse\":\n            model_selection_kwargs = {\n                'min_models': args.mse_min_models,\n                'max_models': args.mse_max_models,\n            }\n            min_measurements = args.mse_min_measurements\n            selector = MSEModelSelector(\n                df=df.loc[~df.mass_spec],\n                predictor=input_predictor,\n                min_measurements=min_measurements)\n        elif scoring == \"mass-spec\":\n            mass_spec_df = df.loc[df.mass_spec]\n            model_selection_kwargs = {\n                'min_models': args.mass_spec_min_models,\n                'max_models': args.mass_spec_max_models,\n            }\n            min_measurements = args.mass_spec_min_measurements\n            selector = MassSpecModelSelector(\n                df=mass_spec_df,\n                predictor=input_predictor,\n                min_measurements=min_measurements)\n        elif scoring == \"consensus\":\n            model_selection_kwargs = {\n                'min_models': args.consensus_min_models,\n                'max_models': args.consensus_max_models,\n            }\n            selector = ConsensusModelSelector(\n                predictor=input_predictor,\n                num_peptides_per_length=args.consensus_num_peptides_per_length)\n        else:\n            raise ValueError(\"Unsupported scoring method: %s\" % scoring)\n        print(\"Instantiated model selector %s in %0.2f sec.\" % (\n            scoring, time.time() - start))\n        return (selector, model_selection_kwargs)\n\n    for scoring in args.scoring:\n        (selector, model_selection_kwargs) = make_selector(scoring)\n        selectors[scoring] = selector\n        selector_to_model_selection_kwargs[scoring] = model_selection_kwargs\n\n    unselected_accuracy_scorer = None\n    if args.unselected_accuracy_scorer:\n        # Force running all selectors by setting combined_min_contribution_percent=0.\n        unselected_accuracy_scorer = make_selector(\n            args.unselected_accuracy_scorer,\n            combined_min_contribution_percent=0.0)[0]\n        print(\"Using unselected accuracy scorer: %s\" % unselected_accuracy_scorer)\n    GLOBAL_DATA[\"unselected_accuracy_scorer\"] = unselected_accuracy_scorer\n\n    print(\"Selectors for alleles:\")\n    allele_to_selector = {}\n    allele_to_model_selection_kwargs = {}\n    for allele in alleles:\n        selector = None\n        for possible_selector in args.scoring:\n            if selectors[possible_selector].usable_for_allele(allele=allele):\n                selector = selectors[possible_selector]\n                print(\"%20s %s\" % (allele, selector.plan_summary(allele)))\n                break\n        if selector is None:\n            raise ValueError(\"No selectors usable for allele: %s\" % allele)\n        allele_to_selector[allele] = selector\n        allele_to_model_selection_kwargs[allele] = (\n            selector_to_model_selection_kwargs[possible_selector])\n\n    GLOBAL_DATA[\"args\"] = args\n    GLOBAL_DATA[\"input_predictor\"] = input_predictor\n    GLOBAL_DATA[\"unselected_accuracy_scorer\"] = unselected_accuracy_scorer\n    GLOBAL_DATA[\"allele_to_selector\"] = allele_to_selector\n    GLOBAL_DATA[\"allele_to_model_selection_kwargs\"] = allele_to_model_selection_kwargs\n\n    if not os.path.exists(args.out_models_dir):\n        print(\"Attempting to create directory: %s\" % 
args.out_models_dir)\n        os.mkdir(args.out_models_dir)\n        print(\"Done.\")\n\n    result_predictor = Class1AffinityPredictor(metadata_dataframes=metadata_dfs)\n\n    worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n\n    start = time.time()\n\n    if worker_pool is None:\n        # Serial run\n        print(\"Running in serial.\")\n        results = (\n            model_select(allele) for allele in alleles)\n    else:\n        # Parallel run\n        random.shuffle(alleles)\n        results = worker_pool.imap_unordered(\n            partial(model_select, constant_data=GLOBAL_DATA),\n            alleles,\n            chunksize=1)\n\n    unselected_summary = []\n    model_selection_dfs = []\n    for result in tqdm.tqdm(results, total=len(alleles)):\n        pprint(result)\n\n        summary_dict = dict(result)\n        summary_dict[\"retained\"] = result[\"selected\"] is not None\n        del summary_dict[\"selected\"]\n\n        unselected_summary.append(summary_dict)\n        if result['selected'] is not None:\n            model_selection_dfs.append(\n                result['selected'].metadata_dataframes['model_selection'])\n            result_predictor.merge_in_place([result['selected']])\n\n    if model_selection_dfs:\n        model_selection_df = pandas.concat(\n            model_selection_dfs, ignore_index=True)\n        model_selection_df[\"selector\"] = model_selection_df.allele.map(\n            allele_to_selector)\n        result_predictor.metadata_dataframes[\"model_selection\"] = (\n            model_selection_df)\n\n    result_predictor.metadata_dataframes[\"unselected_summary\"] = (\n        pandas.DataFrame(unselected_summary))\n\n    print(\"Done model selecting for %d alleles.\" % len(alleles))\n    result_predictor.save(args.out_models_dir)\n\n    model_selection_time = time.time() - start\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n    print(\"Model selection time %0.2f min.\" % (model_selection_time / 60.0))\n    print(\"Predictor written to: %s\" % args.out_models_dir)\n\n\nclass ScrambledPredictor(object):\n    def __init__(self, predictor):\n        self.predictor = predictor\n        self._predictions = {}\n        self._allele = None\n\n    def predict(self, peptides, allele):\n        if peptides not in self._predictions:\n            self._predictions[peptides] = pandas.Series(\n                self.predictor.predict(peptides=peptides, allele=allele))\n            self._allele = allele\n        assert allele == self._allele\n        return self._predictions[peptides].sample(frac=1.0).values\n\n\ndef model_select(allele, constant_data=GLOBAL_DATA):\n    unselected_accuracy_scorer = constant_data[\"unselected_accuracy_scorer\"]\n    selector = constant_data[\"allele_to_selector\"][allele]\n    model_selection_kwargs = constant_data[\n        \"allele_to_model_selection_kwargs\"\n    ][allele]\n    predictor = constant_data[\"input_predictor\"]\n    args = constant_data[\"args\"]\n    unselected_accuracy_scorer_samples = constant_data[\"args\"].unselected_accuracy_scorer_num_samples\n\n    result_dict = {\n        \"allele\": allele\n    }\n\n    unselected_score = None\n    unselected_score_percentile = None\n    unselected_score_scrambled_mean = None\n    if unselected_accuracy_scorer:\n        unselected_score_function = (\n            unselected_accuracy_scorer.score_function(allele))\n\n        additional_metadata = {}\n        unselected_score = unselected_score_function(\n            predictor, 
additional_metadata_out=additional_metadata)\n        scrambled_predictor = ScrambledPredictor(predictor)\n        scrambled_scores = numpy.array([\n            unselected_score_function(\n                scrambled_predictor)\n            for _ in range(unselected_accuracy_scorer_samples)\n        ])\n        unselected_score_scrambled_mean = scrambled_scores.mean()\n        unselected_score_percentile = percentileofscore(\n            scrambled_scores, unselected_score)\n        print(\n            \"Unselected score and percentile\",\n            allele,\n            unselected_score,\n            unselected_score_percentile,\n            additional_metadata)\n        result_dict.update(\n            dict((\"unselected_%s\" % key, value)\n                 for (key, value)\n                 in additional_metadata.items()))\n\n    selected = None\n    threshold = args.unselected_accuracy_percentile_threshold\n    if unselected_score_percentile is None or unselected_score_percentile >= threshold:\n        selected = predictor.model_select(\n            score_function=selector.score_function(allele=allele),\n            alleles=[allele],\n            **model_selection_kwargs)\n\n    result_dict[\"unselected_score_plan\"] = (\n        unselected_accuracy_scorer.plan_summary(allele)\n        if unselected_accuracy_scorer else None)\n    result_dict[\"selector_score_plan\"] = selector.plan_summary(allele)\n    result_dict[\"unselected_accuracy_score_percentile\"] = unselected_score_percentile\n    result_dict[\"unselected_score\"] = unselected_score\n    result_dict[\"unselected_score_scrambled_mean\"] = unselected_score_scrambled_mean\n    result_dict[\"selected\"] = selected\n    result_dict[\"num_models\"] = len(selected.neural_networks) if selected else None\n    return result_dict\n\n\ndef cache_encoding(predictor, peptides):\n    # Encode the peptides for each neural network, so the encoding\n    # becomes cached.\n    for network in predictor.neural_networks:\n        network.peptides_to_network_input(peptides)\n\n\nclass ScoreFunction(object):\n    \"\"\"\n    Thin wrapper over a score function (Class1AffinityPredictor -> float).\n    Used to keep a summary string associated with the function.\n    \"\"\"\n    def __init__(self, function, summary=None):\n        self.function = function\n        self.summary = summary if summary else \"(n/a)\"\n\n    def __call__(self, *args, **kwargs):\n        return self.function(*args, **kwargs)\n\n\nclass CombinedModelSelector(object):\n    \"\"\"\n    Model selector that computes a weighted average over other model selectors.\n    \"\"\"\n    def __init__(self, model_selectors, weights=None, min_contribution_percent=1.0):\n        if weights is None:\n            weights = numpy.ones(shape=(len(model_selectors),))\n        self.model_selectors = model_selectors\n        self.selector_to_weight = dict(zip(self.model_selectors, weights))\n        self.min_contribution_percent = min_contribution_percent\n\n    def usable_for_allele(self, allele):\n        return any(\n            selector.usable_for_allele(allele)\n            for selector in self.model_selectors)\n\n    def plan_summary(self, allele):\n        return self.score_function(allele, dry_run=True).summary\n\n    def score_function(self, allele, dry_run=False):\n        selector_to_max_weighted_score = {}\n        for selector in self.model_selectors:\n            weight = self.selector_to_weight[selector]\n            if selector.usable_for_allele(allele):\n                
max_weighted_score = selector.max_absolute_value(allele) * weight\n            else:\n                max_weighted_score = 0\n            selector_to_max_weighted_score[selector] = max_weighted_score\n        max_total_score = sum(selector_to_max_weighted_score.values())\n\n        # Use only selectors that can contribute >1% to the total score\n        selectors_to_use = [\n            selector\n            for selector in self.model_selectors\n            if (\n                selector_to_max_weighted_score[selector] >\n                max_total_score * self.min_contribution_percent / 100.0)\n        ]\n\n        summary = \", \".join([\n            \"%s(|%.3f|)\" % (\n                selector.plan_summary(allele),\n                selector_to_max_weighted_score[selector])\n            for selector in selectors_to_use\n        ])\n\n        if dry_run:\n            score = None\n        else:\n            score_functions_and_weights = [\n                (selector.score_function(allele=allele),\n                 self.selector_to_weight[selector])\n                for selector in selectors_to_use\n            ]\n\n            def score(predictor, additional_metadata_out=None):\n                scores = numpy.array([\n                    score_function(\n                        predictor,\n                        additional_metadata_out=additional_metadata_out) * weight\n                    for (score_function, weight) in score_functions_and_weights\n                ])\n                if additional_metadata_out is not None:\n                    additional_metadata_out[\"combined_score_terms\"] = str(\n                        list(scores))\n\n                return scores.sum()\n        return ScoreFunction(score, summary=summary)\n\n\nclass ConsensusModelSelector(object):\n    \"\"\"\n    Model selector that scores sub-ensembles based on their Kendall tau\n    consistency with the full ensemble over a set of random peptides.\n    \"\"\"\n    def __init__(\n            self,\n            predictor,\n            num_peptides_per_length=10000,\n            multiply_score_by_value=10.0):\n\n        (min_length, max_length) = predictor.supported_peptide_lengths\n        peptides = []\n        for length in range(min_length, max_length + 1):\n            peptides.extend(\n                random_peptides(num_peptides_per_length, length=length))\n\n        self.peptides = EncodableSequences.create(peptides)\n        self.predictor = predictor\n        self.multiply_score_by_value = multiply_score_by_value\n        cache_encoding(self.predictor, self.peptides)\n\n    def usable_for_allele(self, allele):\n        return True\n\n    def max_absolute_value(self, allele):\n        return self.multiply_score_by_value\n\n    def plan_summary(self, allele):\n        return \"consensus (%d points)\" % len(self.peptides)\n\n    def score_function(self, allele):\n        full_ensemble_predictions = self.predictor.predict(\n            allele=allele,\n            peptides=self.peptides)\n\n        def score(predictor, additional_metadata_out=None):\n            predictions = predictor.predict(\n                allele=allele,\n                peptides=self.peptides,\n            )\n            tau = kendalltau(predictions, full_ensemble_predictions).correlation\n            if additional_metadata_out is not None:\n                additional_metadata_out[\"score_consensus_tau\"] = tau\n            return tau * self.multiply_score_by_value\n\n        return ScoreFunction(\n            score, 
summary=self.plan_summary(allele))\n\n\nclass MSEModelSelector(object):\n    \"\"\"\n    Model selector that uses mean-squared error to score models. Inequalities\n    are supported.\n    \"\"\"\n    def __init__(\n            self,\n            df,\n            predictor,\n            min_measurements=1,\n            multiply_score_by_data_size=True):\n\n        self.df = df\n        self.predictor = predictor\n        self.min_measurements = min_measurements\n        self.multiply_score_by_data_size = multiply_score_by_data_size\n\n    def usable_for_allele(self, allele):\n        return (self.df.allele == allele).sum() >= self.min_measurements\n\n    def max_absolute_value(self, allele):\n        if self.multiply_score_by_data_size:\n            return (self.df.allele == allele).sum()\n        else:\n            return 1.0\n\n    def plan_summary(self, allele):\n        return self.score_function(allele).summary\n\n    def score_function(self, allele):\n        sub_df = self.df.loc[self.df.allele == allele].reset_index(drop=True)\n        peptides = EncodableSequences.create(sub_df.peptide.values)\n\n        def score(predictor, additional_metadata_out=None):\n            predictions = predictor.predict(\n                allele=allele,\n                peptides=peptides,\n            )\n            deviations = from_ic50(predictions) - from_ic50(\n                sub_df.measurement_value)\n\n            if 'measurement_inequality' in sub_df.columns:\n                # Must reverse meaning of inequality since we are working with\n                # transformed 0-1 values, which are anti-correlated with the ic50s.\n                # The measurement_inequality column is given in terms of ic50s.\n                deviations.loc[\n                    (\n                    (sub_df.measurement_inequality == \"<\") & (deviations > 0)) |\n                    ((sub_df.measurement_inequality == \">\") & (deviations < 0))\n                    ] = 0.0\n\n            score_mse = (1 - (deviations ** 2).mean())\n            if additional_metadata_out is not None:\n                additional_metadata_out[\"score_MSE\"] = 1 - score_mse\n\n                # We additionally include other scores on (=) measurements as\n                # a convenience\n                eq_df = sub_df\n                if 'measurement_inequality' in sub_df.columns:\n                    eq_df = sub_df.loc[\n                        sub_df.measurement_inequality == \"=\"\n                        ]\n                additional_metadata_out[\"score_pearsonr\"] = (\n                    pearsonr(\n                        numpy.log(eq_df.measurement_value.values),\n                        numpy.log(predictions[eq_df.index.values]))[0])\n\n                for threshold in [500, 5000, 15000]:\n                    if (eq_df.measurement_value < threshold).nunique() == 2:\n                        additional_metadata_out[\"score_AUC@%d\" % threshold] = (\n                            roc_auc_score(\n                                (eq_df.measurement_value < threshold).values,\n                                -1 * predictions[eq_df.index.values]))\n\n            return score_mse * (\n                len(sub_df) if self.multiply_score_by_data_size else 1)\n\n        summary = \"mse (%d points)\" % (len(sub_df))\n        return ScoreFunction(score, summary=summary)\n\n\nclass MassSpecModelSelector(object):\n    \"\"\"\n    Model selector that uses PPV of differentiating decoys from hits from\n    mass-spec experiments.\n    \"\"\"\n    def 
__init__(\n            self,\n            df,\n            predictor,\n            decoys_per_length=0,\n            min_measurements=100,\n            multiply_score_by_data_size=True):\n\n        # Index is peptide, columns are alleles\n        hit_matrix = df.groupby(\n            [\"peptide\", \"allele\"]).measurement_value.count().unstack().fillna(\n            0).astype(bool)\n\n        if decoys_per_length:\n            (min_length, max_length) = predictor.supported_peptide_lengths\n            decoys = []\n            for length in range(min_length, max_length + 1):\n                decoys.extend(\n                    random_peptides(decoys_per_length, length=length))\n\n            decoy_matrix = pandas.DataFrame(\n                index=decoys, columns=hit_matrix.columns, dtype=bool)\n            decoy_matrix[:] = False\n            full_matrix = pandas.concat([hit_matrix, decoy_matrix])\n        else:\n            full_matrix = hit_matrix\n\n        if len(full_matrix) > 0:\n            full_matrix = full_matrix.sample(frac=1.0).astype(float)\n\n        self.df = full_matrix\n        self.predictor = predictor\n        self.min_measurements = min_measurements\n        self.multiply_score_by_data_size = multiply_score_by_data_size\n\n        self.peptides = EncodableSequences.create(full_matrix.index.values)\n        cache_encoding(self.predictor, self.peptides)\n\n    @staticmethod\n    def ppv(y_true, predictions):\n        df = pandas.DataFrame({\"prediction\": predictions, \"y_true\": y_true})\n        return df.sort_values(\"prediction\", ascending=True)[\n            : int(y_true.sum())\n        ].y_true.mean()\n\n    def usable_for_allele(self, allele):\n        return allele in self.df.columns and (\n            self.df[allele].sum() >= self.min_measurements)\n\n    def max_absolute_value(self, allele):\n        if self.multiply_score_by_data_size:\n            return self.df[allele].sum()\n        else:\n            return 1.0\n\n    def plan_summary(self, allele):\n        return self.score_function(allele).summary\n\n    def score_function(self, allele):\n        total_hits = self.df[allele].sum()\n        total_decoys = (self.df[allele] == 0).sum()\n        multiplier = total_hits if self.multiply_score_by_data_size else 1\n        def score(predictor, additional_metadata_out=None):\n            predictions = predictor.predict(\n                allele=allele,\n                peptides=self.peptides,\n            )\n            ppv = self.ppv(self.df[allele], predictions)\n            if additional_metadata_out is not None:\n                additional_metadata_out[\"score_mass_spec_PPV\"] = ppv\n\n                # We additionally compute AUC score.\n                additional_metadata_out[\"score_mass_spec_AUC\"] = roc_auc_score(\n                    self.df[allele].values, -1 * predictions)\n            return ppv * multiplier\n\n        summary = \"mass-spec (%d hits / %d decoys)\" % (total_hits, total_decoys)\n        return ScoreFunction(score, summary=summary)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
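  {
    "path": "examples/mass_spec_ppv_demo.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the mhcflurry package: a standalone\nrestatement of the positive predictive value (PPV) metric used by\nMassSpecModelSelector in\nmhcflurry/select_allele_specific_models_command.py above. The n\nstrongest (lowest-IC50) predictions are taken, where n is the number\nof mass-spec hits, and PPV is the fraction of hits among them. The\nfile path and data are hypothetical.\n\"\"\"\nimport pandas\n\n\ndef ppv(y_true, predictions):\n    # Identical logic to MassSpecModelSelector.ppv: low predicted IC50s\n    # (strong predicted binders) sort first.\n    df = pandas.DataFrame({\"prediction\": predictions, \"y_true\": y_true})\n    return df.sort_values(\"prediction\", ascending=True)[\n        : int(y_true.sum())\n    ].y_true.mean()\n\n\n# Three mass-spec hits (1.0) and three decoys (0.0), with predicted\n# affinities in nM.\ny_true = pandas.Series([1.0, 1.0, 1.0, 0.0, 0.0, 0.0])\npredictions = pandas.Series([30.0, 80.0, 9000.0, 200.0, 15000.0, 40000.0])\n\n# The top 3 predictions (30, 80, 200 nM) contain 2 hits: PPV = 2/3.\nprint(ppv(y_true, predictions))\n"
  },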
  {
    "path": "mhcflurry/select_pan_allele_models_command.py",
    "content": "\"\"\"\nModel select class1 pan-allele models.\n\nAPPROACH: For each training fold, we select at least min and at most max models\n(where min and max are set by the --{min/max}-models-per-fold argument) using a\nstep-up (forward) selection procedure. The final ensemble is the union of all\nselected models across all folds.\n\"\"\"\nimport argparse\nimport os\nimport signal\nimport sys\nimport time\nimport traceback\nimport hashlib\nfrom pprint import pprint\n\nimport numpy\nimport pandas\n\nimport tqdm  # progress bar\n\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .encodable_sequences import EncodableSequences\nfrom .allele_encoding import AlleleEncoding\nfrom .common import configure_logging\nfrom .local_parallelism import (\n    worker_pool_with_gpu_assignments_from_args,\n    add_local_parallelism_args)\nfrom .cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\nfrom .regression_target import from_ic50\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data\",\n    metavar=\"FILE.csv\",\n    required=False,\n    help=(\n        \"Model selection data CSV. Expected columns: \"\n        \"allele, peptide, measurement_value\"))\nparser.add_argument(\n    \"--models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to read models\")\nparser.add_argument(\n    \"--out-models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write selected models\")\nparser.add_argument(\n    \"--min-models-per-fold\",\n    type=int,\n    default=2,\n    metavar=\"N\",\n    help=\"Min number of models to select per fold\")\nparser.add_argument(\n    \"--max-models-per-fold\",\n    type=int,\n    default=1000,\n    metavar=\"N\",\n    help=\"Max number of models to select per fold\")\nparser.add_argument(\n    \"--mass-spec-regex\",\n    metavar=\"REGEX\",\n    default=\"mass[- ]spec\",\n    help=\"Regular expression for mass-spec data. Runs on measurement_source col.\"\n    \"Default: %(default)s.\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Verbosity. Default: %(default)s\",\n    default=0)\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\ndef mse(\n        predictions,\n        actual,\n        inequalities=None,\n        affinities_are_already_01_transformed=False):\n    \"\"\"\n    Mean squared error of predictions vs. 
actual\n\n    Parameters\n    ----------\n    predictions : list of float\n    actual : list of float\n    inequalities : list of string (\">\", \"<\", or \"=\")\n    affinities_are_already_01_transformed : boolean\n        Predictions and actual are taken to be nanomolar affinities if\n        affinities_are_already_01_transformed is False, otherwise 0-1 values.\n\n    Returns\n    -------\n    float\n    \"\"\"\n    if not affinities_are_already_01_transformed:\n        predictions = from_ic50(predictions)\n        actual = from_ic50(actual)\n\n    deviations = (\n        numpy.asarray(predictions) - numpy.asarray(actual))\n\n    if inequalities is not None:\n        # Must reverse meaning of inequality since we are working with\n        # transformed 0-1 values, which are anti-correlated with the ic50s.\n        # The measurement_inequality column is given in terms of ic50s.\n        inequalities = numpy.asarray(inequalities)\n        deviations[\n            ((inequalities == \"<\") & (deviations > 0)) | (\n             (inequalities == \">\") & (deviations < 0))\n        ] = 0.0\n\n    return (deviations ** 2).mean()\n\n\ndef run(argv=sys.argv[1:]):\n    global GLOBAL_DATA\n\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    args.out_models_dir = os.path.abspath(args.out_models_dir)\n\n    configure_logging(verbose=args.verbosity > 1)\n\n    df = pandas.read_csv(args.data)\n    print(\"Loaded data: %s\" % (str(df.shape)))\n\n    input_predictor = Class1AffinityPredictor.load(\n        args.models_dir, optimization_level=0)\n    print(\"Loaded: %s\" % input_predictor)\n\n    alleles = input_predictor.supported_alleles\n    (min_peptide_length, max_peptide_length) = (\n        input_predictor.supported_peptide_lengths)\n\n    metadata_dfs = {}\n\n    fold_cols = [c for c in df if c.startswith(\"fold_\")]\n    num_folds = len(fold_cols)\n    if num_folds <= 1:\n        raise ValueError(\"Too few folds: \", num_folds)\n\n    df = df.loc[\n        (df.peptide.str.len() >= min_peptide_length) &\n        (df.peptide.str.len() <= max_peptide_length)\n    ]\n    print(\"Subselected to %d-%dmers: %s\" % (\n        min_peptide_length, max_peptide_length, str(df.shape)))\n\n    print(\"Num folds: \", num_folds, \"fraction included:\")\n    print(df[fold_cols].mean())\n\n    # Allele names in data are assumed to be already normalized.\n    df = df.loc[df.allele.isin(alleles)]\n    print(\"Subselected to supported alleles: %s\" % str(df.shape))\n\n    metadata_dfs[\"model_selection_data\"] = df\n\n    df[\"mass_spec\"] = df.measurement_source.str.contains(\n        args.mass_spec_regex)\n\n    def make_train_peptide_hash(sub_df):\n        train_peptide_hash = hashlib.sha1()\n        for peptide in sorted(sub_df.peptide.values):\n            train_peptide_hash.update(peptide.encode())\n        return train_peptide_hash.hexdigest()\n\n    folds_to_predictors = dict(\n        (int(col.split(\"_\")[-1]), (\n            [],\n            make_train_peptide_hash(df.loc[df[col] == 1])))\n        for col in fold_cols)\n    print(folds_to_predictors)\n    for model in input_predictor.class1_pan_allele_models:\n        training_info = model.fit_info[-1]['training_info']\n        fold_num = training_info['fold_num']\n        assert num_folds == training_info['num_folds']\n        (lst, hash) = folds_to_predictors[fold_num]\n       
 train_peptide_hash = training_info['train_peptide_hash']\n        numpy.testing.assert_equal(hash, train_peptide_hash)\n        lst.append(model)\n\n    work_items = []\n    for (fold_num, (models, _)) in folds_to_predictors.items():\n        work_items.append({\n            'fold_num': fold_num,\n            'models': models,\n            'min_models': args.min_models_per_fold,\n            'max_models': args.max_models_per_fold,\n        })\n\n    GLOBAL_DATA[\"data\"] = df\n    GLOBAL_DATA[\"input_predictor\"] = input_predictor\n\n    if not os.path.exists(args.out_models_dir):\n        print(\"Attempting to create directory: %s\" % args.out_models_dir)\n        os.mkdir(args.out_models_dir)\n        print(\"Done.\")\n\n    result_predictor = Class1AffinityPredictor(\n        allele_to_sequence=input_predictor.allele_to_sequence,\n        metadata_dataframes=metadata_dfs)\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n    worker_pool = None\n    start = time.time()\n    if serial_run:\n        # Serial run\n        print(\"Running in serial.\")\n        results = (model_select(**item) for item in work_items)\n    elif args.cluster_parallelism:\n        # Run using separate processes HPC cluster.\n        print(\"Running on cluster.\")\n        results = cluster_results_from_args(\n            args,\n            work_function=model_select,\n            work_items=work_items,\n            constant_data=GLOBAL_DATA,\n            result_serialization_method=\"pickle\")\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n\n        print(\"Processing %d work items in parallel.\" % len(work_items))\n        assert not serial_run\n\n        for item in work_items:\n            item['constant_data'] = GLOBAL_DATA\n\n        # Parallel run\n        results = worker_pool.imap_unordered(\n            do_model_select_task,\n            work_items,\n            chunksize=1)\n\n    models_by_fold = {}\n    summary_dfs = []\n    for result in tqdm.tqdm(results, total=len(work_items)):\n        pprint(result)\n        fold_num = result['fold_num']\n        (all_models_for_fold, _) = folds_to_predictors[fold_num]\n        models = result['selected_models']\n        summary_df = result['summary'].copy()\n        summary_df.index = summary_df.index.map(\n            lambda idx: all_models_for_fold[idx])\n        summary_dfs.append(summary_df)\n\n        print(\"Selected %d models for fold %d: %s\" % (\n            len(models), fold_num, result['selected_indices']))\n        models_by_fold[fold_num] = models\n        for model in models:\n            result_predictor.add_pan_allele_model(model)\n\n    summary_df = pandas.concat(summary_dfs, ignore_index=False)\n    summary_df[\"model_config\"] = summary_df.index.map(lambda m: m.get_config())\n    result_predictor.metadata_dataframes[\"model_selection_summary\"] = (\n        summary_df.reset_index(drop=True))\n\n    result_predictor.save(args.out_models_dir)\n\n    model_selection_time = time.time() - start\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n    print(\"Model selection time %0.2f min.\" % (model_selection_time / 60.0))\n    print(\"Predictor [%d models] written to: %s\" % (\n        len(result_predictor.neural_networks),\n        args.out_models_dir))\n\n\ndef do_model_select_task(item, constant_data=GLOBAL_DATA):\n    if 'constant_data' in item:\n        constant_data 
= item.pop('constant_data')\n    return model_select(constant_data=constant_data, **item)\n\n\ndef model_select(\n        fold_num, models, min_models, max_models, constant_data=GLOBAL_DATA):\n    \"\"\"\n    Model select for a fold.\n\n    Parameters\n    ----------\n    fold_num : int\n    models : list of Class1NeuralNetwork\n    min_models : int\n    max_models : int\n    constant_data : dict\n\n    Returns\n    -------\n    dict with keys 'fold_num', 'selected_indices', 'summary'\n    \"\"\"\n    full_data = constant_data[\"data\"]\n    input_predictor = constant_data[\"input_predictor\"]\n    df = full_data.loc[\n        full_data[\"fold_%d\" % fold_num] == 0\n    ]\n\n    peptides = EncodableSequences.create(df.peptide.values)\n    alleles = AlleleEncoding(\n        df.allele.values,\n        borrow_from=input_predictor.master_allele_encoding)\n\n    predictions_df = df.copy()\n    for (i, model) in enumerate(models):\n        predictions_df[i] = from_ic50(model.predict(peptides, alleles))\n\n    actual = from_ic50(predictions_df.measurement_value)\n\n    selected = []\n    selected_score = 0\n    remaining_models = set(numpy.arange(len(models)))\n    individual_model_scores = {}\n    while remaining_models and len(selected) < max_models:\n        best_model = None\n        best_model_score = 0\n        for i in remaining_models:\n            possible_ensemble = list(selected) + [i]\n            predictions = predictions_df[possible_ensemble].mean(axis=1)\n            mse_score = 1 - mse(\n                predictions,\n                actual,\n                inequalities=(\n                    predictions_df.measurement_inequality\n                    if 'measurement_inequality' in predictions_df.columns\n                    else None),\n                affinities_are_already_01_transformed=True)\n            if mse_score >= best_model_score:\n                best_model = i\n                best_model_score = mse_score\n            if not selected:\n                # First iteration. Store individual model scores.\n                individual_model_scores[i] = mse_score\n        if len(selected) < min_models or best_model_score > selected_score:\n            selected_score = best_model_score\n            remaining_models.remove(best_model)\n            selected.append(best_model)\n        else:\n            break\n\n    assert selected\n    selected_models = [models[i] for i in selected]\n    for model in selected_models:\n        model.clear_allele_representations()\n\n    summary_df = pandas.Series(individual_model_scores)[\n        numpy.arange(len(models))\n    ].to_frame()\n    summary_df.columns = ['mse_score']\n\n    return {\n        'fold_num': fold_num,\n        'selected_indices': selected,\n        'selected_models': selected_models,\n        'summary': summary_df,  # indexed by model index\n    }\n\n\nif __name__ == '__main__':\n    run()\n"
  },
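  {
    "path": "examples/inequality_mse_demo.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the mhcflurry package: a simplified\nstandalone restatement of the inequality-aware MSE used for model\nselection in mhcflurry/select_pan_allele_models_command.py above.\nDeviations on the correct side of a \"<\" or \">\" measurement are\nzeroed; the inequalities flip direction because the comparison happens\non from_ic50-transformed values. Assumes mhcflurry is installed; the\nfile path and numbers are hypothetical.\n\"\"\"\nimport numpy\n\nfrom mhcflurry.regression_target import from_ic50\n\n\ndef inequality_mse(predictions, actual, inequalities=None):\n    # Same logic as the mse() helper above, for 0-1 transformed inputs.\n    deviations = numpy.asarray(predictions) - numpy.asarray(actual)\n    if inequalities is not None:\n        inequalities = numpy.asarray(inequalities)\n        deviations[\n            ((inequalities == \"<\") & (deviations > 0)) |\n            ((inequalities == \">\") & (deviations < 0))\n        ] = 0.0\n    return (deviations ** 2).mean()\n\n\npredictions = from_ic50(numpy.array([50.0, 50.0]))\nactual = from_ic50(numpy.array([500.0, 500.0]))\n\n# Both predictions miss the measured 500 nM value by the same amount,\n# but \"< 500 nM\" is consistent with predicting 50 nM, so that term is\n# zeroed and the error halves.\nprint(inequality_mse(predictions, actual, inequalities=[\"=\", \"=\"]))\nprint(inequality_mse(predictions, actual, inequalities=[\"=\", \"<\"]))\n"
  },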
  {
    "path": "mhcflurry/select_processing_models_command.py",
    "content": "\"\"\"\nModel select antigen processing models.\n\nAPPROACH: For each training fold, we select at least min and at most max models\n(where min and max are set by the --{min/max}-models-per-fold argument) using a\nstep-up (forward) selection procedure. The final ensemble is the union of all\nselected models across all folds. AUC is used as the metric.\n\"\"\"\nimport argparse\nimport os\nimport signal\nimport sys\nimport time\nimport traceback\nimport hashlib\nfrom pprint import pprint\n\nimport numpy\nimport pandas\nfrom sklearn.metrics import roc_auc_score\n\nimport tqdm  # progress bar\n\nfrom .class1_processing_predictor import Class1ProcessingPredictor\nfrom .flanking_encoding import FlankingEncoding\nfrom .common import configure_logging\nfrom .local_parallelism import (\n    worker_pool_with_gpu_assignments_from_args,\n    add_local_parallelism_args)\nfrom .cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data\",\n    metavar=\"FILE.csv\",\n    required=False,\n    help=(\n        \"Model selection data CSV. Expected columns: \"\n        \"peptide, hit, fold_0, ..., fold_N\"))\nparser.add_argument(\n    \"--models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to read models\")\nparser.add_argument(\n    \"--out-models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write selected models\")\nparser.add_argument(\n    \"--min-models-per-fold\",\n    type=int,\n    default=2,\n    metavar=\"N\",\n    help=\"Min number of models to select per fold\")\nparser.add_argument(\n    \"--max-models-per-fold\",\n    type=int,\n    default=1000,\n    metavar=\"N\",\n    help=\"Max number of models to select per fold\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Verbosity. 
Default: %(default)s\",\n    default=0)\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\n\ndef run(argv=sys.argv[1:]):\n    global GLOBAL_DATA\n\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    args.out_models_dir = os.path.abspath(args.out_models_dir)\n\n    configure_logging(verbose=args.verbosity > 1)\n\n    df = pandas.read_csv(args.data)\n    print(\"Loaded data: %s\" % (str(df.shape)))\n\n    input_predictor = Class1ProcessingPredictor.load(args.models_dir)\n    print(\"Loaded: %s\" % input_predictor)\n\n    metadata_dfs = {}\n\n    fold_cols = [c for c in df if c.startswith(\"fold_\")]\n    num_folds = len(fold_cols)\n    if num_folds <= 1:\n        raise ValueError(\"Too few folds: \", num_folds)\n\n    print(\"Num folds: \", num_folds, \"fraction included:\")\n    print(df[fold_cols].mean())\n\n    metadata_dfs[\"model_selection_data\"] = df\n\n\n    def make_train_peptide_hash(sub_df):\n        train_peptide_hash = hashlib.sha1()\n        for peptide in sorted(sub_df.peptide.values):\n            train_peptide_hash.update(peptide.encode())\n        return train_peptide_hash.hexdigest()\n\n    folds_to_predictors = dict(\n        (int(col.split(\"_\")[-1]), (\n            [],\n            make_train_peptide_hash(df.loc[df[col] == 1])))\n        for col in fold_cols)\n    print(folds_to_predictors)\n    for model in input_predictor.models:\n        training_info = model.fit_info[-1]['training_info']\n        fold_num = training_info['fold_num']\n        assert num_folds == training_info['num_folds']\n        (lst, hash) = folds_to_predictors[fold_num]\n        train_peptide_hash = training_info['train_peptide_hash']\n        numpy.testing.assert_equal(hash, train_peptide_hash)\n        lst.append(model)\n\n    work_items = []\n    for (fold_num, (models, _)) in folds_to_predictors.items():\n        work_items.append({\n            'fold_num': fold_num,\n            'models': models,\n            'min_models': args.min_models_per_fold,\n            'max_models': args.max_models_per_fold,\n        })\n\n    GLOBAL_DATA[\"data\"] = df\n    GLOBAL_DATA[\"input_predictor\"] = input_predictor\n\n    if not os.path.exists(args.out_models_dir):\n        print(\"Attempting to create directory: %s\" % args.out_models_dir)\n        os.mkdir(args.out_models_dir)\n        print(\"Done.\")\n\n    result_predictor = Class1ProcessingPredictor(\n        models=[],\n        metadata_dataframes=metadata_dfs)\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n    worker_pool = None\n    start = time.time()\n    if serial_run:\n        # Serial run\n        print(\"Running in serial.\")\n        results = (model_select(**item) for item in work_items)\n    elif args.cluster_parallelism:\n        # Run using separate processes HPC cluster.\n        print(\"Running on cluster.\")\n        results = cluster_results_from_args(\n            args,\n            work_function=model_select,\n            work_items=work_items,\n            constant_data=GLOBAL_DATA,\n            result_serialization_method=\"pickle\")\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n\n        print(\"Processing %d work items in parallel.\" % len(work_items))\n        assert not 
serial_run\n\n        # Parallel run\n        results = worker_pool.imap_unordered(\n            do_model_select_task,\n            work_items,\n            chunksize=1)\n\n    models_by_fold = {}\n    summary_dfs = []\n    for result in tqdm.tqdm(results, total=len(work_items)):\n        pprint(result)\n        fold_num = result['fold_num']\n        (all_models_for_fold, _) = folds_to_predictors[fold_num]\n        models = [\n            all_models_for_fold[i]\n            for i in result['selected_indices']\n        ]\n        summary_df = result['summary'].copy()\n        summary_df.index = summary_df.index.map(\n            lambda idx: all_models_for_fold[idx])\n        summary_dfs.append(summary_df)\n\n        print(\"Selected %d models for fold %d: %s\" % (\n            len(models), fold_num, result['selected_indices']))\n        models_by_fold[fold_num] = models\n        result_predictor.add_models(models)\n\n    summary_df = pandas.concat(summary_dfs, ignore_index=False)\n    summary_df[\"model_config\"] = summary_df.index.map(lambda m: m.get_config())\n    result_predictor.metadata_dataframes[\"model_selection_summary\"] = (\n        summary_df.reset_index(drop=True))\n\n    result_predictor.save(args.out_models_dir)\n\n    model_selection_time = time.time() - start\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n    print(\"Model selection time %0.2f min.\" % (model_selection_time / 60.0))\n    print(\"Predictor [%d models] written to: %s\" % (\n        len(result_predictor.models), args.out_models_dir))\n\n\ndef do_model_select_task(item, constant_data=GLOBAL_DATA):\n    return model_select(constant_data=constant_data, **item)\n\n\ndef model_select(\n        fold_num, models, min_models, max_models, constant_data=GLOBAL_DATA):\n    \"\"\"\n    Model select for a fold.\n\n    Parameters\n    ----------\n    fold_num : int\n    models : list of Class1NeuralNetwork\n    min_models : int\n    max_models : int\n    constant_data : dict\n\n    Returns\n    -------\n    dict with keys 'fold_num', 'selected_indices', 'summary'\n    \"\"\"\n\n    full_data = constant_data[\"data\"]\n    df = full_data.loc[\n        full_data[\"fold_%d\" % fold_num] == 0\n    ]\n\n    sequences = FlankingEncoding(\n        peptides=df.peptide.values,\n        n_flanks=df.n_flank.values,\n        c_flanks=df.c_flank.values)\n\n    predictions_df = df.copy()\n    for (i, model) in enumerate(models):\n        predictions_df[i] = model.predict_encoded(sequences)\n\n    selected = []\n    selected_score = 0\n    remaining_models = set(numpy.arange(len(models)))\n    individual_model_scores = {}\n    selected_in_round = {}\n    ensemble_score_when_selected = {}\n    while remaining_models and len(selected) < max_models:\n        best_model = None\n        best_model_score = 0\n        for i in remaining_models:\n            possible_ensemble = list(selected) + [i]\n            predictions = predictions_df[possible_ensemble].mean(axis=1)\n            auc_score = roc_auc_score(df.hit.values, predictions.values)\n            if auc_score > best_model_score:\n                best_model = i\n                best_model_score = auc_score\n            if not selected:\n                # First iteration. 
Store individual model scores.\n                individual_model_scores[i] = auc_score\n        if len(selected) < min_models or best_model_score > selected_score:\n            selected_score = best_model_score\n            remaining_models.remove(best_model)\n            selected.append(best_model)\n            selected_in_round[best_model] = len(selected)\n            ensemble_score_when_selected[best_model] = selected_score\n        else:\n            break\n\n    assert selected\n\n    summary_df = pandas.Series(individual_model_scores)[\n        numpy.arange(len(models))\n    ].to_frame()\n    summary_df.columns = ['auc_score']\n    summary_df[\"selected_in_round\"] = pandas.Series(selected_in_round)\n    summary_df[\"ensemble_score_when_selected\"] = pandas.Series(\n        ensemble_score_when_selected)\n\n    print(summary_df)\n\n    return {\n        'fold_num': fold_num,\n        'selected_indices': selected,\n        'summary': summary_df,  # indexed by model index\n    }\n\n\nif __name__ == '__main__':\n    run()\n"
  },
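  {
    "path": "examples/forward_selection_demo.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the mhcflurry package: a minimal\nstandalone version of the step-up (forward) ensemble selection loop\nused in mhcflurry/select_processing_models_command.py above. Each\n\"model\" is represented by its precomputed predictions; at every step\nthe model whose addition most improves ensemble AUC is added, stopping\nwhen no candidate helps. The data and models are made up.\n\"\"\"\nimport numpy\nfrom sklearn.metrics import roc_auc_score\n\nrng = numpy.random.RandomState(0)\nhits = rng.randint(0, 2, size=200)\n\n# Fake per-model predictions: two informative models, two noisy ones.\nmodel_predictions = [\n    hits + rng.normal(0, noise, size=hits.shape)\n    for noise in [0.5, 0.8, 3.0, 5.0]\n]\n\n(min_models, max_models) = (1, len(model_predictions))\nselected = []\nselected_score = 0.0\nremaining = set(range(len(model_predictions)))\nwhile remaining and len(selected) < max_models:\n    (best_model, best_score) = (None, 0.0)\n    for i in remaining:\n        ensemble = [model_predictions[j] for j in selected + [i]]\n        score = roc_auc_score(hits, numpy.mean(ensemble, axis=0))\n        if score > best_score:\n            (best_model, best_score) = (i, score)\n    if len(selected) < min_models or best_score > selected_score:\n        selected_score = best_score\n        remaining.remove(best_model)\n        selected.append(best_model)\n    else:\n        break\n\nprint(\"Selected models: %s (AUC %0.3f)\" % (selected, selected_score))\n"
  },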
  {
    "path": "mhcflurry/testing_utils.py",
    "content": "\"\"\"\nUtilities used in MHCflurry unit tests.\n\"\"\"\nfrom . import Class1NeuralNetwork\nfrom .common import configure_pytorch\n\n\ndef startup():\n    \"\"\"\n    Configure PyTorch for running unit tests.\n    \"\"\"\n    configure_pytorch(num_threads=2)\n\n\ndef cleanup():\n    \"\"\"\n    Clear PyTorch session and other process-wide resources.\n    \"\"\"\n    Class1NeuralNetwork.clear_model_cache()\n"
  },
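  {
    "path": "examples/testing_utils_demo.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the mhcflurry package: shows the\nintended call pattern for the startup()/cleanup() helpers in\nmhcflurry/testing_utils.py above, wired up as module-level pytest\nhooks. Assumes mhcflurry is installed; the file path and the\nplaceholder test are hypothetical.\n\"\"\"\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\ndef setup_module(module):\n    # Configure PyTorch (e.g. thread count) before any test runs.\n    startup()\n\n\ndef teardown_module(module):\n    # Release cached models and other process-wide resources afterwards.\n    cleanup()\n\n\ndef test_placeholder():\n    # A real test module would exercise mhcflurry predictors here.\n    assert True\n"
  },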
  {
    "path": "mhcflurry/train_allele_specific_models_command.py",
    "content": "\"\"\"\nTrain Class1 single allele models.\n\"\"\"\nimport argparse\nimport os\nimport signal\nimport sys\nimport time\nimport traceback\nimport random\nfrom functools import partial\n\nimport pandas\nimport yaml\nfrom sklearn.metrics.pairwise import cosine_similarity\nfrom sklearn.model_selection import StratifiedKFold\nfrom .common import normalize_allele_name\nimport tqdm  # progress bar\n\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .common import configure_logging\nfrom .local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom .hyperparameters import HyperparameterDefaults\nfrom .allele_encoding import AlleleEncoding\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n# Note on parallelization:\n# When running in parallel, avoid using the neural network backend in the main\n# process. Model loading and inference should happen in worker processes.\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data\",\n    metavar=\"FILE.csv\",\n    required=True,\n    help=(\n        \"Training data CSV. Expected columns: \"\n        \"allele, peptide, measurement_value\"))\nparser.add_argument(\n    \"--out-models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write models and manifest\")\nparser.add_argument(\n    \"--hyperparameters\",\n    metavar=\"FILE.json\",\n    required=True,\n    help=\"JSON or YAML of hyperparameters\")\nparser.add_argument(\n    \"--allele\",\n    default=None,\n    nargs=\"+\",\n    help=\"Alleles to train models for. If not specified, all alleles with \"\n    \"enough measurements will be used.\")\nparser.add_argument(\n    \"--min-measurements-per-allele\",\n    type=int,\n    metavar=\"N\",\n    default=50,\n    help=\"Train models for alleles with >=N measurements.\")\nparser.add_argument(\n    \"--held-out-fraction-reciprocal\",\n    type=int,\n    metavar=\"N\",\n    default=None,\n    help=\"Hold out 1/N fraction of data (for e.g. subsequent model selection. \"\n    \"For example, specify 5 to hold out 20 percent of the data.\")\nparser.add_argument(\n    \"--held-out-fraction-seed\",\n    type=int,\n    metavar=\"N\",\n    default=0,\n    help=\"Seed for randomizing which measurements are held out. Only matters \"\n    \"when --held-out-fraction is specified. Default: %(default)s.\")\nparser.add_argument(\n    \"--ignore-inequalities\",\n    action=\"store_true\",\n    default=False,\n    help=\"Do not use affinity value inequalities even when present in data\")\nparser.add_argument(\n    \"--n-models\",\n    type=int,\n    metavar=\"N\",\n    help=\"Ensemble size, i.e. how many models to train for each architecture. \"\n    \"If specified here it overrides any 'n_models' specified in the \"\n    \"hyperparameters.\")\nparser.add_argument(\n    \"--max-epochs\",\n    type=int,\n    metavar=\"N\",\n    help=\"Max training epochs. 
If specified here it overrides any 'max_epochs' \"\n    \"specified in the hyperparameters.\")\nparser.add_argument(\n    \"--allele-sequences\",\n    metavar=\"FILE.csv\",\n    help=\"Allele sequences file. Used for computing allele similarity matrix.\")\nparser.add_argument(\n    \"--save-interval\",\n    type=float,\n    metavar=\"N\",\n    default=60,\n    help=\"Write models to disk every N seconds. Only affects parallel runs; \"\n    \"serial runs write each model to disk as it is trained.\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Verbosity. Default: %(default)s\",\n    default=0)\n\nadd_local_parallelism_args(parser)\n\nTRAIN_DATA_HYPERPARAMETER_DEFAULTS = HyperparameterDefaults(\n    subset=\"all\",\n    pretrain_min_points=None,\n)\n\n\ndef run(argv=sys.argv[1:]):\n    global GLOBAL_DATA\n\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    args.out_models_dir = os.path.abspath(args.out_models_dir)\n\n    configure_logging(verbose=args.verbosity > 1)\n\n    hyperparameters_lst = yaml.safe_load(open(args.hyperparameters))\n    assert isinstance(hyperparameters_lst, list), hyperparameters_lst\n    print(\"Loaded hyperparameters list: %s\" % str(hyperparameters_lst))\n\n    df = pandas.read_csv(args.data)\n    print(\"Loaded training data: %s\" % (str(df.shape)))\n\n    df = df.loc[\n        (df.peptide.str.len() >= 8) & (df.peptide.str.len() <= 15)\n    ]\n    print(\"Subselected to 8-15mers: %s\" % (str(df.shape)))\n\n    if args.ignore_inequalities and \"measurement_inequality\" in df.columns:\n        print(\"Dropping measurement_inequality column\")\n        del df[\"measurement_inequality\"]\n\n    # Allele counts are in terms of quantitative data only.\n    allele_counts = (\n        df.loc[df.measurement_type == \"quantitative\"].allele.value_counts())\n\n    if args.allele:\n        alleles = [normalize_allele_name(a) for a in args.allele]\n    else:\n        alleles = list(allele_counts.loc[\n            allele_counts > args.min_measurements_per_allele\n        ].index)\n\n    # Allele names in data are assumed to be already normalized.\n    print(\"Selected %d/%d alleles: %s\" % (len(alleles), df.allele.nunique(), ' '.join(alleles)))\n    df = df.loc[df.allele.isin(alleles)].dropna()\n\n    if args.held_out_fraction_reciprocal:\n        df = subselect_df_held_out(\n            df,\n            reciprocal_held_out_fraction=args.held_out_fraction_reciprocal,\n            seed=args.held_out_fraction_seed)\n\n    print(\"Training data: %s\" % (str(df.shape)))\n\n    GLOBAL_DATA[\"train_data\"] = df\n    GLOBAL_DATA[\"args\"] = args\n\n    if not os.path.exists(args.out_models_dir):\n        print(\"Attempting to create directory: %s\" % args.out_models_dir)\n        os.mkdir(args.out_models_dir)\n        print(\"Done.\")\n\n    predictor = Class1AffinityPredictor(\n        metadata_dataframes={\n            'train_data': df,\n        })\n    serial_run = args.num_jobs == 0\n\n    work_items = []\n    for (h, hyperparameters) in enumerate(hyperparameters_lst):\n        n_models = None\n        if 'n_models' in hyperparameters:\n            n_models = hyperparameters.pop(\"n_models\")\n        if args.n_models:\n            n_models = args.n_models\n        if not n_models:\n            raise ValueError(\n                \"Specify --n-models or the n_models 
hyperparameter\")\n\n        if args.max_epochs:\n            hyperparameters['max_epochs'] = args.max_epochs\n\n        hyperparameters['train_data'] = (\n            TRAIN_DATA_HYPERPARAMETER_DEFAULTS.with_defaults(\n                hyperparameters.get('train_data', {})))\n\n        if hyperparameters['train_data']['pretrain_min_points'] and (\n                'allele_similarity_matrix' not in GLOBAL_DATA):\n            print(\"Generating allele similarity matrix.\")\n            if not args.allele_sequences:\n                parser.error(\n                    \"Allele sequences required when using pretrain_min_points\")\n            allele_sequences = pandas.read_csv(\n                args.allele_sequences,\n                index_col=\"allele\")\n            print(\"Read %d allele sequences\" % len(allele_sequences))\n            allele_sequences = allele_sequences.loc[\n                allele_sequences.index.isin(df.allele.unique())\n            ]\n            print(\"Allele sequences matching train data: %d\" % len(allele_sequences))\n            blosum_encoding = (\n                AlleleEncoding(\n                    allele_sequences.index.values,\n                    allele_sequences.pseudosequence.to_dict())\n                .fixed_length_vector_encoded_sequences(\"BLOSUM62\"))\n            allele_similarity_matrix = pandas.DataFrame(\n                cosine_similarity(\n                    blosum_encoding.reshape((len(allele_sequences), -1))),\n                index=allele_sequences.index.values,\n                columns=allele_sequences.index.values)\n            GLOBAL_DATA['allele_similarity_matrix'] = allele_similarity_matrix\n            print(\"Computed allele similarity matrix\")\n            print(allele_similarity_matrix)\n\n        for (i, allele) in enumerate(df.allele.unique()):\n            for model_num in range(n_models):\n                work_dict = {\n                    'n_models': 1,\n                    'allele_num': i,\n                    'n_alleles': len(alleles),\n                    'hyperparameter_set_num': h,\n                    'num_hyperparameter_sets': len(hyperparameters_lst),\n                    'allele': allele,\n                    'hyperparameters': hyperparameters,\n                    'verbose': args.verbosity,\n                    'progress_print_interval': None if not serial_run else 5.0,\n                    'predictor': predictor if serial_run else None,\n                    'save_to': args.out_models_dir if serial_run else None,\n                }\n                work_items.append(work_dict)\n\n    start = time.time()\n\n    worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n\n    if worker_pool:\n        print(\"Processing %d work items in parallel.\" % len(work_items))\n\n        # The estimated time to completion is more accurate if we randomize\n        # the order of the work.\n        random.shuffle(work_items)\n\n        for item in work_items:\n            item['constant_data'] = GLOBAL_DATA\n\n        results_generator = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, train_model),\n            work_items,\n            chunksize=1)\n\n        unsaved_predictors = []\n        last_save_time = time.time()\n        for new_predictor in tqdm.tqdm(results_generator, total=len(work_items)):\n            unsaved_predictors.append(new_predictor)\n\n            if time.time() > last_save_time + args.save_interval:\n                # Save current predictor.\n                save_start = time.time()\n   
             new_model_names = predictor.merge_in_place(unsaved_predictors)\n                predictor.save(\n                    args.out_models_dir,\n                    model_names_to_write=new_model_names,\n                    write_metadata=False)\n                print(\n                    \"Saved predictor (%d models total) including %d new models \"\n                    \"in %0.2f sec to %s\" % (\n                        len(predictor.neural_networks),\n                        len(new_model_names),\n                        time.time() - save_start,\n                        args.out_models_dir))\n                unsaved_predictors = []\n                last_save_time = time.time()\n\n        predictor.merge_in_place(unsaved_predictors)\n\n    else:\n        # Run in serial. In this case, every worker is passed the same predictor,\n        # which it adds models to, so no merging is required. It also saves\n        # as it goes so no saving is required at the end.\n        for _ in tqdm.trange(len(work_items)):\n            item = work_items.pop(0)  # want to keep freeing up memory\n            work_predictor = train_model(**item)\n            assert work_predictor is predictor\n        assert not work_items\n\n    print(\"Saving final predictor to: %s\" % args.out_models_dir)\n    predictor.save(args.out_models_dir)  # write all models just to be sure\n    print(\"Done.\")\n\n    print(\"*\" * 30)\n    training_time = time.time() - start\n    print(\"Trained affinity predictor with %d networks in %0.2f min.\" % (\n        len(predictor.neural_networks), training_time / 60.0))\n    print(\"*\" * 30)\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n    print(\"Predictor written to: %s\" % args.out_models_dir)\n\n\ndef alleles_by_similarity(allele):\n    global GLOBAL_DATA\n    allele_similarity = GLOBAL_DATA['allele_similarity_matrix']\n    if allele not in allele_similarity.columns:\n        # Use random alleles\n        print(\"No similar alleles for: %s\" % allele)\n        return [allele] + list(\n            allele_similarity.columns.to_series().sample(frac=1.0))\n    return (\n        allele_similarity[allele] + (\n            allele_similarity.index == allele)  # force specified allele first\n    ).sort_values(ascending=False).index.tolist()\n\n\ndef train_model(\n        n_models,\n        allele_num,\n        n_alleles,\n        hyperparameter_set_num,\n        num_hyperparameter_sets,\n        allele,\n        hyperparameters,\n        verbose,\n        progress_print_interval,\n        predictor,\n        save_to,\n        constant_data=GLOBAL_DATA):\n\n    if predictor is None:\n        predictor = Class1AffinityPredictor()\n\n    pretrain_min_points = hyperparameters['train_data']['pretrain_min_points']\n\n    data = constant_data[\"train_data\"]\n\n    subset = hyperparameters.get(\"train_data\", {}).get(\"subset\", \"all\")\n    if subset == \"quantitative\":\n        data = data.loc[\n            data.measurement_type == \"quantitative\"\n        ]\n    elif subset == \"all\":\n        pass\n    else:\n        raise ValueError(\"Unsupported subset: %s\" % subset)\n\n    data_size_by_allele = data.allele.value_counts()\n\n    if pretrain_min_points:\n        similar_alleles = alleles_by_similarity(allele)\n        alleles = []\n        while not alleles or data_size_by_allele.loc[alleles].sum() < pretrain_min_points:\n            alleles.append(similar_alleles.pop(0))\n        data = data.loc[data.allele.isin(alleles)]\n        assert 
len(data) >= pretrain_min_points, (len(data), pretrain_min_points)\n        train_rounds = (data.allele == allele).astype(int).values\n    else:\n        train_rounds = None\n        data = data.loc[data.allele == allele]\n\n    progress_preamble = (\n        \"[%2d / %2d hyperparameters] \"\n        \"[%4d / %4d alleles] %s \" % (\n            hyperparameter_set_num + 1,\n            num_hyperparameter_sets,\n            allele_num + 1,\n            n_alleles,\n            allele))\n\n    train_data = data.sample(frac=1.0)\n    predictor.fit_allele_specific_predictors(\n        n_models=n_models,\n        architecture_hyperparameters_list=[hyperparameters],\n        allele=allele,\n        peptides=train_data.peptide.values,\n        affinities=train_data.measurement_value.values,\n        inequalities=(\n            train_data.measurement_inequality.values\n            if \"measurement_inequality\" in train_data.columns else None),\n        train_rounds=train_rounds,\n        models_dir_for_save=save_to,\n        progress_preamble=progress_preamble,\n        progress_print_interval=progress_print_interval,\n        verbose=verbose)\n\n    return predictor\n\n\ndef subselect_df_held_out(df, reciprocal_held_out_fraction=10, seed=0):\n    df = df.copy()\n    df[\"allele_peptide\"] = df.allele + \"_\" + df.peptide\n\n    kf = StratifiedKFold(\n        n_splits=reciprocal_held_out_fraction,\n        shuffle=True,\n        random_state=seed)\n\n    # Stratify by both allele and binder vs. nonbinder.\n    df[\"key\"] = [\n        \"%s_%s\" % (\n            row.allele,\n            \"binder\" if row.measurement_value <= 500 else \"nonbinder\")\n        for (_, row) in df.iterrows()\n    ]\n\n    (train, test) = next(kf.split(df, df.key))\n    selected_allele_peptides = df.iloc[train].allele_peptide.unique()\n    result_df = df.loc[\n        df.allele_peptide.isin(selected_allele_peptides)\n    ]\n    del result_df[\"allele_peptide\"]\n    return result_df\n\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "mhcflurry/train_pan_allele_models_command.py",
    "content": "\"\"\"\nTrain Class1 pan-allele models.\n\"\"\"\nimport argparse\nimport os\nfrom os.path import join\nimport signal\nimport sys\nimport time\nimport traceback\nimport random\nimport pprint\nimport hashlib\nimport pickle\nimport uuid\nfrom functools import partial\n\nimport numpy\nimport pandas\nimport yaml\nimport tqdm  # progress bar\n\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .class1_neural_network import Class1NeuralNetwork\nfrom .common import configure_logging, normalize_allele_name\nfrom .local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom .cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\nfrom .allele_encoding import AlleleEncoding\nfrom .encodable_sequences import EncodableSequences\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n# Note on parallelization:\n# When running in parallel, avoid using the neural network backend in the main\n# process. Model loading and inference should happen in worker processes.\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data\",\n    metavar=\"FILE.csv\",\n    help=(\n        \"Training data CSV. Expected columns: \"\n        \"allele, peptide, measurement_value\"))\nparser.add_argument(\n    \"--pretrain-data\",\n    metavar=\"FILE.csv\",\n    help=(\n        \"Pre-training data CSV. Expected columns: \"\n        \"allele, peptide, measurement_value\"))\nparser.add_argument(\n    \"--out-models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write models and manifest\")\nparser.add_argument(\n    \"--hyperparameters\",\n    metavar=\"FILE.json\",\n    help=\"JSON or YAML of hyperparameters\")\nparser.add_argument(\n    \"--held-out-measurements-per-allele-fraction-and-max\",\n    type=float,\n    metavar=\"X\",\n    nargs=2,\n    default=[0.25, 100],\n    help=\"Fraction of measurements per allele to hold out, and maximum number\")\nparser.add_argument(\n    \"--ignore-inequalities\",\n    action=\"store_true\",\n    default=False,\n    help=\"Do not use affinity value inequalities even when present in data\")\nparser.add_argument(\n    \"--num-folds\",\n    type=int,\n    default=4,\n    metavar=\"N\",\n    help=\"Number of training folds.\")\nparser.add_argument(\n    \"--num-replicates\",\n    type=int,\n    metavar=\"N\",\n    default=1,\n    help=\"Number of replicates per (architecture, fold) pair to train.\")\nparser.add_argument(\n    \"--max-epochs\",\n    type=int,\n    metavar=\"N\",\n    help=\"Max training epochs. If specified here it overrides any 'max_epochs' \"\n    \"specified in the hyperparameters.\")\nparser.add_argument(\n    \"--allele-sequences\",\n    metavar=\"FILE.csv\",\n    help=\"Allele sequences file.\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Verbosity. 
Default: %(default)s\",\n    default=0)\nparser.add_argument(\n    \"--debug\",\n    action=\"store_true\",\n    default=False,\n    help=\"Launch python debugger on error\")\nparser.add_argument(\n    \"--continue-incomplete\",\n    action=\"store_true\",\n    default=False,\n    help=\"Continue training models from an incomplete training run. If this is \"\n    \"specified then the only required argument is --out-models-dir\")\nparser.add_argument(\n    \"--only-initialize\",\n    action=\"store_true\",\n    default=False,\n    help=\"Do not actually train models. The initialized run can be continued \"\n    \"later with --continue-incomplete.\")\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\ndef assign_folds(df, num_folds, held_out_fraction, held_out_max):\n    \"\"\"\n    Split training data into multiple test/train pairs, which we refer to as\n    folds. Note that a given data point may be assigned to multiple test or\n    train sets; these folds are NOT a non-overlapping partition as used in cross\n    validation.\n\n    A fold is defined by a boolean value for each data point, indicating whether\n    it is included in the training data for that fold. If it's not in the\n    training data, then it's in the test data.\n\n    Folds are balanced in terms of allele content.\n\n    Parameters\n    ----------\n    df : pandas.DataFrame\n        training data\n    num_folds : int\n    held_out_fraction : float\n        Fraction of data to hold out as test data in each fold\n    held_out_max : int\n        For a given allele, do not hold out more than held_out_max\n        data points in any fold.\n\n    Returns\n    -------\n    pandas.DataFrame\n        index is same as df.index, columns are \"fold_0\", ... \"fold_N\" giving\n        whether the data point is in the training data for the fold\n    \"\"\"\n    result_df = pandas.DataFrame(index=df.index)\n\n    for fold in range(num_folds):\n        result_df[\"fold_%d\" % fold] = True\n        for (allele, sub_df) in df.groupby(\"allele\"):\n            medians = sub_df.groupby(\"peptide\").measurement_value.median()\n\n            low_peptides = medians[medians < medians.median()].index.values\n            high_peptides = medians[medians >= medians.median()].index.values\n\n            held_out_count = int(\n                min(len(medians) * held_out_fraction, held_out_max))\n\n            held_out_peptides = set()\n            if held_out_count == 0:\n                pass\n            elif held_out_count < 2:\n                held_out_peptides = set(\n                    medians.index.to_series().sample(n=held_out_count))\n            else:\n                held_out_low_count = min(\n                    len(low_peptides),\n                    int(held_out_count / 2))\n                held_out_high_count = min(\n                    len(high_peptides),\n                    held_out_count - held_out_low_count)\n\n                held_out_low = pandas.Series(low_peptides).sample(\n                    n=held_out_low_count) if held_out_low_count else set()\n                held_out_high = pandas.Series(high_peptides).sample(\n                    n=held_out_high_count) if held_out_high_count else set()\n                held_out_peptides = set(held_out_low).union(set(held_out_high))\n\n            result_df.loc[\n                sub_df.index[sub_df.peptide.isin(held_out_peptides)],\n                \"fold_%d\" % fold\n            ] = False\n\n    print(\"Training points per fold\")\n    
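# The fold columns are boolean, so summing counts training points per\n    # fold; the inverted sum below counts held-out (test) points.\n    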
print(result_df.sum())\n\n    print(\"Test points per fold\")\n    print((~result_df).sum())\n    return result_df\n\n\ndef pretrain_data_iterator(\n        filename,\n        master_allele_encoding,\n        peptides_per_chunk=1024):\n    \"\"\"\n    Step through a CSV file giving predictions for a large number of peptides\n    (rows) and alleles (columns).\n\n    Parameters\n    ----------\n    filename : string\n    master_allele_encoding : AlleleEncoding\n    peptides_per_chunk : int\n\n    Returns\n    -------\n    Generator of (AlleleEncoding, EncodableSequences, float affinities) tuples\n\n    \"\"\"\n    empty = pandas.read_csv(filename, index_col=0, nrows=0)\n    empty.columns = empty.columns.map(normalize_allele_name)\n    print(\"Pretrain alleles available: \", *empty.columns.values)\n    usable_alleles = [\n        c for c in empty.columns\n        if c in master_allele_encoding.allele_to_sequence\n    ]\n    print(\"Using %d / %d alleles\" % (len(usable_alleles), len(empty.columns)))\n    print(\"Skipped alleles: \", [\n        c for c in empty.columns\n        if c not in master_allele_encoding.allele_to_sequence\n    ])\n\n    allele_encoding = AlleleEncoding(\n        numpy.tile(usable_alleles, peptides_per_chunk),\n        borrow_from=master_allele_encoding)\n\n    while True:\n        synthetic_iter = pandas.read_csv(\n            filename, index_col=0, chunksize=peptides_per_chunk)\n        for (k, df) in enumerate(synthetic_iter):\n            if len(df) != peptides_per_chunk:\n                continue\n\n            df.columns = empty.columns\n            df = df[usable_alleles]\n            encodable_peptides = EncodableSequences(\n                numpy.repeat(\n                    df.index.values,\n                    len(usable_alleles)))\n\n            yield (allele_encoding, encodable_peptides, df.stack().values)\n\n\ndef run(argv=sys.argv[1:]):\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    if args.debug:\n        try:\n            return main(args)\n        except Exception as e:\n            print(e)\n            import ipdb  # pylint: disable=import-error\n            ipdb.set_trace()\n            raise\n    else:\n        return main(args)\n\n\ndef main(args):\n    print(\"Arguments:\")\n    print(args)\n\n    args.out_models_dir = os.path.abspath(args.out_models_dir)\n    configure_logging(verbose=args.verbosity > 1)\n\n    if not args.continue_incomplete:\n        initialize_training(args)\n\n    if not args.only_initialize:\n        train_models(args)\n\n\ndef initialize_training(args):\n    required_arguments = [\n        \"data\",\n        \"out_models_dir\",\n        \"hyperparameters\",\n        \"num_folds\",\n    ]\n    for arg in required_arguments:\n        if getattr(args, arg) is None:\n            parser.error(\"Missing required arg: %s\" % arg)\n\n    print(\"Initializing training.\")\n    hyperparameters_lst = yaml.safe_load(open(args.hyperparameters))\n    assert isinstance(hyperparameters_lst, list)\n    print(\"Loaded hyperparameters list:\")\n    pprint.pprint(hyperparameters_lst)\n\n    allele_sequences = pandas.read_csv(\n        args.allele_sequences, index_col=0).iloc[:,0]\n\n    df = pandas.read_csv(args.data)\n    print(\"Loaded training data: %s\" % (str(df.shape)))\n    df = df.loc[\n        (df.peptide.str.len() >= 8) & (df.peptide.str.len() <= 
15)\n    ]\n    print(\"Subselected to 8-15mers: %s\" % (str(df.shape)))\n\n    df = df.loc[~df.measurement_value.isnull()]\n    print(\"Dropped NaNs: %s\" % (str(df.shape)))\n\n    df = df.loc[df.allele.isin(allele_sequences.index)]\n    print(\"Subselected to alleles with sequences: %s\" % (str(df.shape)))\n\n    if \"measurement_inequality\" in df.columns:\n        print(\"Data inequalities:\")\n        print(df.measurement_inequality.value_counts())\n\n    if args.ignore_inequalities and \"measurement_inequality\" in df.columns:\n        print(\"Dropping measurement_inequality column\")\n        del df[\"measurement_inequality\"]\n\n    # Allele names in data are assumed to be already normalized.\n    print(\"Training data: %s\" % (str(df.shape)))\n\n    (held_out_fraction, held_out_max) = (\n        args.held_out_measurements_per_allele_fraction_and_max)\n\n    folds_df = assign_folds(\n        df=df,\n        num_folds=args.num_folds,\n        held_out_fraction=held_out_fraction,\n        held_out_max=held_out_max)\n\n    allele_sequences_in_use = allele_sequences[\n        allele_sequences.index.isin(df.allele)\n    ]\n    print(\"Will use %d / %d allele sequences\" % (\n        len(allele_sequences_in_use), len(allele_sequences)))\n\n    # All alleles, not just those with training data.\n    full_allele_encoding = AlleleEncoding(\n        alleles=allele_sequences.index.values,\n        allele_to_sequence=allele_sequences.to_dict()\n    )\n\n    # Only alleles with training data. For efficiency we perform model training\n    # using only these alleles in the neural network embedding layer.\n    allele_encoding = AlleleEncoding(\n        alleles=allele_sequences_in_use.index.values,\n        allele_to_sequence=allele_sequences_in_use.to_dict())\n\n    if not os.path.exists(args.out_models_dir):\n        print(\"Attempting to create directory: %s\" % args.out_models_dir)\n        os.mkdir(args.out_models_dir)\n        print(\"Done.\")\n\n    predictor = Class1AffinityPredictor(\n        allele_to_sequence=allele_encoding.allele_to_sequence,\n        metadata_dataframes={\n            'train_data': pandas.merge(\n                df,\n                folds_df,\n                left_index=True,\n                right_index=True)\n        })\n\n    work_items = []\n    for (h, hyperparameters) in enumerate(hyperparameters_lst):\n        if 'n_models' in hyperparameters:\n            raise ValueError(\"n_models is unsupported\")\n\n        if args.max_epochs:\n            hyperparameters['max_epochs'] = args.max_epochs\n\n        if hyperparameters.get(\"train_data\", {}).get(\"pretrain\", False):\n            if not args.pretrain_data:\n                raise ValueError(\"--pretrain-data is required\")\n\n        for fold in range(args.num_folds):\n            for replicate in range(args.num_replicates):\n                work_dict = {\n                    'work_item_name': str(uuid.uuid4()),\n                    'architecture_num': h,\n                    'num_architectures': len(hyperparameters_lst),\n                    'fold_num': fold,\n                    'num_folds': args.num_folds,\n                    'replicate_num': replicate,\n                    'num_replicates': args.num_replicates,\n                    'hyperparameters': hyperparameters,\n                    'pretrain_data_filename': args.pretrain_data,\n                }\n                work_items.append(work_dict)\n\n    training_init_info = {}\n    training_init_info[\"train_data\"] = df\n    training_init_info[\"folds_df\"] = folds_df\n    training_init_info[\"allele_encoding\"] 
= allele_encoding\n    training_init_info[\"full_allele_encoding\"] = full_allele_encoding\n    training_init_info[\"work_items\"] = work_items\n\n    # Save empty predictor (for metadata)\n    predictor.save(args.out_models_dir)\n\n    # Write training_init_info.\n    with open(join(args.out_models_dir, \"training_init_info.pkl\"), \"wb\") as fd:\n        pickle.dump(training_init_info, fd, protocol=pickle.HIGHEST_PROTOCOL)\n\n    print(\"Done initializing training.\")\n\n\ndef train_models(args):\n    global GLOBAL_DATA\n\n    print(\"Beginning training.\")\n    predictor = Class1AffinityPredictor.load(\n        args.out_models_dir, optimization_level=0)\n    print(\"Loaded predictor with %d networks\" % len(predictor.neural_networks))\n\n    with open(join(args.out_models_dir, \"training_init_info.pkl\"), \"rb\") as fd:\n        GLOBAL_DATA.update(pickle.load(fd))\n    print(\"Loaded training init info.\")\n\n    all_work_items = GLOBAL_DATA[\"work_items\"]\n    complete_work_item_names = [\n        network.fit_info[-1][\"training_info\"][\"work_item_name\"] for network in\n        predictor.neural_networks\n    ]\n    work_items = [\n        item for item in all_work_items\n        if item[\"work_item_name\"] not in complete_work_item_names\n    ]\n    print(\"Found %d work items, of which %d are incomplete and will run now.\" % (\n        len(all_work_items), len(work_items)))\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n\n    # The estimated time to completion is more accurate if we randomize\n    # the order of the work.\n    random.shuffle(work_items)\n    for (work_item_num, item) in enumerate(work_items):\n        item['work_item_num'] = work_item_num\n        item['num_work_items'] = len(work_items)\n        item['progress_print_interval'] = 60.0 if not serial_run else 5.0\n        item['predictor'] = predictor if serial_run else None\n        item['save_to'] = args.out_models_dir if serial_run else None\n        item['verbose'] = args.verbosity\n        if args.pretrain_data:\n            item['pretrain_data_filename'] = args.pretrain_data\n\n    start = time.time()\n\n    worker_pool = None\n    if serial_run:\n        # Run in serial. Every worker is passed the same predictor,\n        # which it adds models to, so no merging is required. 
It also saves\n        # as it goes so no saving is required at the end.\n        print(\"Processing %d work items in serial.\" % len(work_items))\n        for _ in tqdm.trange(len(work_items)):\n            item = work_items.pop(0)  # want to keep freeing up memory\n            work_predictor = train_model(**item)\n            assert work_predictor is predictor\n        assert not work_items\n        results_generator = None\n    elif args.cluster_parallelism:\n        # Run using separate processes on an HPC cluster.\n        results_generator = cluster_results_from_args(\n            args,\n            work_function=train_model,\n            work_items=work_items,\n            constant_data=GLOBAL_DATA,\n            result_serialization_method=\"save_predictor\")\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n\n        print(\"Processing %d work items in parallel.\" % len(work_items))\n        assert not serial_run\n\n        for item in work_items:\n            item['constant_data'] = GLOBAL_DATA\n\n        results_generator = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, train_model),\n            work_items,\n            chunksize=1)\n\n    if results_generator:\n        for new_predictor in tqdm.tqdm(results_generator, total=len(work_items)):\n            save_start = time.time()\n            (new_model_name,) = predictor.merge_in_place([new_predictor])\n            predictor.save(\n                args.out_models_dir,\n                model_names_to_write=[new_model_name],\n                write_metadata=False)\n            print(\n                \"Saved predictor (%d models total) with 1 new model \"\n                \"in %0.2f sec to %s\" % (\n                    len(predictor.neural_networks),\n                    time.time() - save_start,\n                    args.out_models_dir))\n\n    # We want the final predictor to support all alleles with sequences, not\n    # just those we actually used for model training.\n    predictor.allele_to_sequence = (\n        GLOBAL_DATA['full_allele_encoding'].allele_to_sequence)\n    predictor.clear_cache()\n    predictor.save(args.out_models_dir)\n    print(\"Done.\")\n\n    print(\"*\" * 30)\n    training_time = time.time() - start\n    print(\"Trained affinity predictor with %d networks in %0.2f min.\" % (\n        len(predictor.neural_networks), training_time / 60.0))\n    print(\"*\" * 30)\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n    print(\"Predictor written to: %s\" % args.out_models_dir)\n\n\ndef train_model(\n        work_item_name,\n        work_item_num,\n        num_work_items,\n        architecture_num,\n        num_architectures,\n        fold_num,\n        num_folds,\n        replicate_num,\n        num_replicates,\n        hyperparameters,\n        pretrain_data_filename,\n        verbose,\n        progress_print_interval,\n        predictor,\n        save_to,\n        constant_data=GLOBAL_DATA):\n\n    df = constant_data[\"train_data\"]\n    folds_df = constant_data[\"folds_df\"]\n    allele_encoding = constant_data[\"allele_encoding\"]\n\n    if predictor is None:\n        predictor = Class1AffinityPredictor(\n            allele_to_sequence=allele_encoding.allele_to_sequence)\n\n    numpy.testing.assert_equal(len(df), len(folds_df))\n\n    train_data = df.loc[\n        folds_df[\"fold_%d\" % fold_num]\n    ].sample(frac=1.0)\n\n    
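# Wrap the fold's shuffled peptides and alleles in the encodable\n    # containers expected by Class1NeuralNetwork.fit() and fit_generator().\n    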
train_peptides = EncodableSequences(train_data.peptide.values)\n    train_alleles = AlleleEncoding(\n        train_data.allele.values, borrow_from=allele_encoding)\n\n    progress_preamble = (\n        \"[task %2d / %2d]: \"\n        \"[%2d / %2d folds] \"\n        \"[%2d / %2d architectures] \"\n        \"[%4d / %4d replicates] \" % (\n            work_item_num + 1,\n            num_work_items,\n            fold_num + 1,\n            num_folds,\n            architecture_num + 1,\n            num_architectures,\n            replicate_num + 1,\n            num_replicates))\n\n    print(\"%s [pid %d]. Hyperparameters:\" % (progress_preamble, os.getpid()))\n    pprint.pprint(hyperparameters)\n\n    train_params = dict(hyperparameters.get(\"train_data\", {}))\n\n    def get_train_param(param, default):\n        if param in train_params:\n            result = train_params.pop(param)\n            if verbose:\n                print(\"Train param\", param, \"=\", result)\n        else:\n            result = default\n            if verbose:\n                print(\"Train param\", param, \"=\", result, \"[default]\")\n        return result\n\n\n    def progress_callback():\n        import torch\n        if torch.cuda.is_available():\n            mem = torch.cuda.memory_allocated() / 10**9\n            print(\"Current used GPU memory: \", mem, \"gb\")\n\n    if get_train_param(\"pretrain\", False):\n        pretrain_patience = get_train_param(\"pretrain_patience\", 10)\n        pretrain_min_delta = get_train_param(\"pretrain_min_delta\", 0.0)\n        pretrain_steps_per_epoch = get_train_param(\n            \"pretrain_steps_per_epoch\", 10)\n        pretrain_max_epochs = get_train_param(\"pretrain_max_epochs\", 1000)\n        pretrain_min_epochs = get_train_param(\"pretrain_min_epochs\", 0)\n        pretrain_peptides_per_step = get_train_param(\n            \"pretrain_peptides_per_step\", 1024)\n        max_val_loss = get_train_param(\"pretrain_max_val_loss\", None)\n\n        if verbose:\n            print(\"Unused train params\", train_params)\n\n        attempt = 0\n        while True:\n            attempt += 1\n            print(\"Pre-training attempt %d\" % attempt)\n            if attempt > 10:\n                print(\"Too many pre-training attempts! 
Stopping pretraining.\")\n                break\n\n            model = Class1NeuralNetwork(**hyperparameters)\n            assert model.network() is None\n            generator = pretrain_data_iterator(\n                pretrain_data_filename,\n                allele_encoding,\n                peptides_per_chunk=pretrain_peptides_per_step)\n\n            model.fit_generator(\n                generator,\n                validation_peptide_encoding=train_peptides,\n                validation_affinities=train_data.measurement_value.values,\n                validation_allele_encoding=train_alleles,\n                validation_inequalities=train_data.measurement_inequality.values,\n                patience=pretrain_patience,\n                min_delta=pretrain_min_delta,\n                steps_per_epoch=pretrain_steps_per_epoch,\n                epochs=pretrain_max_epochs,\n                min_epochs=pretrain_min_epochs,\n                verbose=verbose,\n                progress_callback=progress_callback,\n                progress_preamble=progress_preamble + \"PRETRAIN\",\n                progress_print_interval=progress_print_interval,\n            )\n            model.fit_info[-1].setdefault(\n                \"training_info\", {})[\"pretrain_attempt\"] = attempt\n            if not max_val_loss:\n                break\n            final_val_loss = model.fit_info[-1][\"val_loss\"][-1]\n            if final_val_loss >= max_val_loss:\n                print(\"Val loss %f >= max val loss %f. Pre-training again.\" % (\n                    final_val_loss, max_val_loss))\n            else:\n                print(\"Val loss %f < max val loss %f. Done pre-training.\" % (\n                    final_val_loss, max_val_loss))\n                break\n\n        # Use a smaller learning rate for training on real data\n        learning_rate = model.fit_info[-1][\"learning_rate\"]\n        model.hyperparameters['learning_rate'] = learning_rate / 10\n    else:\n        model = Class1NeuralNetwork(**hyperparameters)\n\n    model.fit(\n        peptides=train_peptides,\n        affinities=train_data.measurement_value.values,\n        allele_encoding=train_alleles,\n        inequalities=(\n            train_data.measurement_inequality.values\n            if \"measurement_inequality\" in train_data.columns else None),\n        progress_preamble=progress_preamble,\n        progress_callback=progress_callback,\n        progress_print_interval=progress_print_interval,\n        verbose=verbose)\n\n    # Save model-specific training info\n    train_peptide_hash = hashlib.sha1()\n    for peptide in sorted(train_data.peptide.values):\n        train_peptide_hash.update(peptide.encode())\n\n    model.fit_info[-1].setdefault(\"training_info\", {}).update({\n        \"fold_num\": fold_num,\n        \"num_folds\": num_folds,\n        \"replicate_num\": replicate_num,\n        \"num_replicates\": num_replicates,\n        \"architecture_num\": architecture_num,\n        \"num_architectures\": num_architectures,\n        \"train_peptide_hash\": train_peptide_hash.hexdigest(),\n        \"work_item_name\": work_item_name,\n    })\n\n    numpy.testing.assert_equal(\n        predictor.manifest_df.shape[0], len(predictor.class1_pan_allele_models))\n    predictor.add_pan_allele_model(model, models_dir_for_save=save_to)\n    numpy.testing.assert_equal(\n        predictor.manifest_df.shape[0], len(predictor.class1_pan_allele_models))\n    predictor.clear_cache()\n\n    # Delete the network to release memory\n    
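# (allele representations are cleared and the weights/config captured\n    # first, so the parent predictor can still serialize this model).\n    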
model.clear_allele_representations()\n    model.update_network_description()  # save weights and config\n    model._network = None  # release network to free memory\n    return predictor\n\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "mhcflurry/train_presentation_models_command.py",
    "content": "\"\"\"\nTrain Class1 presentation models.\n\"\"\"\nimport argparse\nimport os\nimport signal\nimport sys\nimport time\nimport traceback\n\nimport pandas\nimport tqdm  # progress bar\n\nfrom .class1_processing_predictor import Class1ProcessingPredictor\nfrom .class1_affinity_predictor import Class1AffinityPredictor\nfrom .class1_presentation_predictor import Class1PresentationPredictor\nfrom .common import configure_logging\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data\",\n    metavar=\"FILE.csv\",\n    help=\"Training data CSV. Expected columns: peptide, n_flank, c_flank, hit\")\nparser.add_argument(\n    \"--out-models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write models and manifest\")\nparser.add_argument(\n    \"--affinity-predictor\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Affinity predictor models dir\")\nparser.add_argument(\n    \"--processing-predictor-with-flanks\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Processing predictor with flanks\")\nparser.add_argument(\n    \"--processing-predictor-without-flanks\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Processing predictor without flanks\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Default: %(default)s\",\n    default=1)\nparser.add_argument(\n    \"--debug\",\n    action=\"store_true\",\n    default=False,\n    help=\"Launch python debugger on error\")\nparser.add_argument(\n    \"--hla-column\",\n    default=\"hla\",\n    help=\"Column in data giving space-separated MHC I alleles\")\nparser.add_argument(\n    \"--target-column\",\n    default=\"hit\",\n    help=\"Column in data giving hit (1) vs decoy (0)\")\n\ndef run(argv=sys.argv[1:]):\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    if args.debug:\n        try:\n            return main(args)\n        except Exception as e:\n            print(e)\n            import ipdb  # pylint: disable=import-error\n            ipdb.set_trace()\n            raise\n    else:\n        return main(args)\n\n\ndef main(args):\n    print(\"Arguments:\")\n    print(args)\n\n    args.out_models_dir = os.path.abspath(args.out_models_dir)\n    configure_logging(verbose=args.verbosity > 1)\n\n    df = pandas.read_csv(args.data)\n    print(\"Loaded training data: %s\" % (str(df.shape)))\n    df = df.loc[\n        (df.peptide.str.len() >= 8) & (df.peptide.str.len() <= 15)\n    ]\n    print(\"Subselected to 8-15mers: %s\" % (str(df.shape)))\n\n    df[\"experiment_id\"] = df[args.hla_column]\n    experiment_to_alleles = dict((\n        key, key.split()) for key in df.experiment_id.unique())\n\n    if not os.path.exists(args.out_models_dir):\n        print(\"Attempting to create directory: %s\" % args.out_models_dir)\n        os.mkdir(args.out_models_dir)\n        print(\"Done.\")\n\n    affinity_predictor = Class1AffinityPredictor.load(\n        args.affinity_predictor,\n        optimization_level=0)\n    processing_predictor_with_flanks = Class1ProcessingPredictor.load(\n        args.processing_predictor_with_flanks)\n    processing_predictor_without_flanks = Class1ProcessingPredictor.load(\n        args.processing_predictor_without_flanks)\n\n    print(\"Loaded affinity predictor\", 
affinity_predictor)\n    print(\n        \"Loaded processing_predictor_with_flanks\",\n        processing_predictor_with_flanks)\n    print(\"Loaded processing_predictor_without_flanks\",\n        processing_predictor_without_flanks)\n\n    predictor = Class1PresentationPredictor(\n        affinity_predictor=affinity_predictor,\n        processing_predictor_with_flanks=processing_predictor_with_flanks,\n        processing_predictor_without_flanks=processing_predictor_without_flanks)\n\n    # We want to predict using an optimized Class1AffinityPredictor but\n    # save the presentation models using an un-optimized Class1AffinityPredictor,\n    # since the optimized (merged) network is only needed at inference time.\n    print(\"Before fit: saving affinity and processing predictors.\")\n    predictor.save(\n        args.out_models_dir,\n        write_affinity_predictor=True,\n        write_processing_predictor=True,\n        write_weights=False,\n        write_percent_ranks=False,\n        write_info=False,\n        write_metdata=False)\n    print(\"Done writing: \", args.out_models_dir)\n\n    print(\"Optimizing affinity predictor.\")\n    optimized = affinity_predictor.optimize()\n    print(\"Optimization performed: \", optimized)\n\n    print(\"Fitting.\")\n    start = time.time()\n    predictor.fit(\n        targets=df[args.target_column].values,\n        peptides=df.peptide.values,\n        alleles=experiment_to_alleles,\n        sample_names=df.experiment_id,\n        n_flanks=df.n_flank.values,\n        c_flanks=df.c_flank.values,\n        verbose=args.verbosity)\n    print(\"Done fitting in\", time.time() - start, \"seconds\")\n\n    print(\"Saving weights and metadata.\")\n    predictor.save(\n        args.out_models_dir,\n        write_affinity_predictor=False,\n        write_processing_predictor=False,\n        write_weights=True,\n        write_percent_ranks=True,\n        write_info=True,\n        write_metdata=True)\n    print(\"Wrote\", args.out_models_dir)\n\n\nif __name__ == '__main__':\n    run()\n"
  },
  {
    "path": "mhcflurry/train_processing_models_command.py",
    "content": "\"\"\"\nTrain Class1 processing models.\n\"\"\"\nimport argparse\nimport os\nfrom os.path import join\nimport signal\nimport sys\nimport time\nimport traceback\nimport random\nimport pprint\nimport hashlib\nimport pickle\nimport uuid\nfrom functools import partial\n\nimport numpy\nimport pandas\nimport yaml\nimport tqdm  # progress bar\n\nfrom .class1_processing_predictor import Class1ProcessingPredictor\nfrom .class1_processing_neural_network import Class1ProcessingNeuralNetwork\nfrom .common import configure_logging\nfrom .local_parallelism import (\n    add_local_parallelism_args,\n    worker_pool_with_gpu_assignments_from_args,\n    call_wrapped_kwargs)\nfrom .cluster_parallelism import (\n    add_cluster_parallelism_args,\n    cluster_results_from_args)\n\ntqdm.monitor_interval = 0  # see https://github.com/tqdm/tqdm/issues/481\n\n# To avoid pickling large matrices to send to child processes when running in\n# parallel, we use this global variable as a place to store data. Data that is\n# stored here before creating the thread pool will be inherited to the child\n# processes upon fork() call, allowing us to share large data with the workers\n# via shared memory.\nGLOBAL_DATA = {}\n\n# Note on parallelization:\n# When running in parallel, avoid using the neural network backend in the main\n# process. Model loading and inference should happen in worker processes.\n\nparser = argparse.ArgumentParser(usage=__doc__)\n\nparser.add_argument(\n    \"--data\",\n    metavar=\"FILE.csv\",\n    help=\"Training data CSV. Expected columns: peptide, n_flank, c_flank, hit\")\nparser.add_argument(\n    \"--out-models-dir\",\n    metavar=\"DIR\",\n    required=True,\n    help=\"Directory to write models and manifest\")\nparser.add_argument(\n    \"--hyperparameters\",\n    metavar=\"FILE.json\",\n    help=\"JSON or YAML of hyperparameters\")\nparser.add_argument(\n    \"--held-out-samples\",\n    type=int,\n    metavar=\"N\",\n    default=10,\n    help=\"Number of experiments to hold out per fold\")\nparser.add_argument(\n    \"--num-folds\",\n    type=int,\n    default=4,\n    metavar=\"N\",\n    help=\"Number of training folds.\")\nparser.add_argument(\n    \"--num-replicates\",\n    type=int,\n    metavar=\"N\",\n    default=1,\n    help=\"Number of replicates per (architecture, fold) pair to train.\")\nparser.add_argument(\n    \"--max-epochs\",\n    type=int,\n    metavar=\"N\",\n    help=\"Max training epochs. If specified here it overrides any 'max_epochs' \"\n    \"specified in the hyperparameters.\")\nparser.add_argument(\n    \"--verbosity\",\n    type=int,\n    help=\"Verbosity. Default: %(default)s\",\n    default=0)\nparser.add_argument(\n    \"--debug\",\n    action=\"store_true\",\n    default=False,\n    help=\"Launch python debugger on error\")\nparser.add_argument(\n    \"--continue-incomplete\",\n    action=\"store_true\",\n    default=False,\n    help=\"Continue training models from an incomplete training run. If this is \"\n    \"specified then the only required argument is --out-models-dir\")\nparser.add_argument(\n    \"--only-initialize\",\n    action=\"store_true\",\n    default=False,\n    help=\"Do not actually train models. The initialized run can be continued \"\n    \"later with --continue-incomplete.\")\n\nadd_local_parallelism_args(parser)\nadd_cluster_parallelism_args(parser)\n\n\ndef assign_folds(df, num_folds, held_out_samples):\n    \"\"\"\n    Split training data into mulitple test/train pairs, which we refer to as\n    folds. 
Note that a given data point may be assigned to multiple test or\n    train sets; these folds are NOT a non-overlapping partition as used in cross\n    validation.\n\n    A fold is defined by a boolean value for each data point, indicating whether\n    it is included in the training data for that fold. If it's not in the\n    training data, then it's in the test data.\n\n    Parameters\n    ----------\n    df : pandas.DataFrame\n        training data\n    num_folds : int\n    held_out_samples : int\n\n    Returns\n    -------\n    pandas.DataFrame\n        index is same as df.index, columns are \"fold_0\", ... \"fold_N\" giving\n        whether the data point is in the training data for the fold\n    \"\"\"\n    result_df = pandas.DataFrame(index=df.index)\n    sample_names = pandas.Series(df.sample_id.unique())\n\n    for fold in range(num_folds):\n        samples_to_exclude = sample_names.sample(n=held_out_samples)\n        result_df[\"fold_%d\" % fold] = ~df.sample_id.isin(samples_to_exclude)\n        print(\"Fold\", fold, \"holding out samples\", *samples_to_exclude)\n\n    print(\"Training points per fold\")\n    print(result_df.sum())\n\n    print(\"Test points per fold\")\n    print((~result_df).sum())\n    return result_df\n\n\ndef run(argv=sys.argv[1:]):\n    # On sigusr1 print stack trace\n    print(\"To show stack trace, run:\\nkill -s USR1 %d\" % os.getpid())\n    signal.signal(signal.SIGUSR1, lambda sig, frame: traceback.print_stack())\n\n    args = parser.parse_args(argv)\n\n    if args.debug:\n        try:\n            return main(args)\n        except Exception as e:\n            print(e)\n            import ipdb  # pylint: disable=import-error\n            ipdb.set_trace()\n            raise\n    else:\n        return main(args)\n\n\ndef main(args):\n    print(\"Arguments:\")\n    print(args)\n\n    args.out_models_dir = os.path.abspath(args.out_models_dir)\n    configure_logging(verbose=args.verbosity > 1)\n\n    if not args.continue_incomplete:\n        initialize_training(args)\n\n    if not args.only_initialize:\n        train_models(args)\n\n\ndef initialize_training(args):\n    required_arguments = [\n        \"data\",\n        \"out_models_dir\",\n        \"hyperparameters\",\n        \"num_folds\",\n    ]\n    for arg in required_arguments:\n        if getattr(args, arg) is None:\n            parser.error(\"Missing required arg: %s\" % arg)\n\n    print(\"Initializing training.\")\n    hyperparameters_lst = yaml.unsafe_load(open(args.hyperparameters))\n    assert isinstance(hyperparameters_lst, list)\n    print(\"Loaded hyperparameters list:\")\n\n    if len(hyperparameters_lst) > 7:\n        pprint.pprint(hyperparameters_lst[:3])\n        print(\"...\")\n        pprint.pprint(hyperparameters_lst[-3:])\n    else:\n        pprint.pprint(hyperparameters_lst)\n    print(\"Length of hyperparameters list: %d\" % (len(hyperparameters_lst)))\n\n    df = pandas.read_csv(args.data)\n    print(\"Loaded training data: %s\" % (str(df.shape)))\n    df = df.loc[\n        (df.peptide.str.len() >= 8) & (df.peptide.str.len() <= 15)\n    ]\n    print(\"Subselected to 8-15mers: %s\" % (str(df.shape)))\n    folds_df = assign_folds(\n        df=df,\n        num_folds=args.num_folds,\n        held_out_samples=args.held_out_samples)\n\n    if not os.path.exists(args.out_models_dir):\n        print(\"Attempting to create directory: %s\" % args.out_models_dir)\n        os.mkdir(args.out_models_dir)\n        print(\"Done.\")\n\n    predictor = Class1ProcessingPredictor(\n        
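# Start with an empty predictor; models are added as each one trains,\n        # and the fold assignments are stored with the training data metadata.\n        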
models=[],\n        metadata_dataframes={\n            'train_data': pandas.merge(\n                df,\n                folds_df,\n                left_index=True,\n                right_index=True)\n        })\n\n    work_items = []\n    for (h, hyperparameters) in enumerate(hyperparameters_lst):\n        if args.max_epochs:\n            hyperparameters['max_epochs'] = args.max_epochs\n\n        for fold in range(args.num_folds):\n            for replicate in range(args.num_replicates):\n                work_dict = {\n                    'work_item_name': str(uuid.uuid4()),\n                    'architecture_num': h,\n                    'num_architectures': len(hyperparameters_lst),\n                    'fold_num': fold,\n                    'num_folds': args.num_folds,\n                    'replicate_num': replicate,\n                    'num_replicates': args.num_replicates,\n                    'hyperparameters': hyperparameters,\n                }\n                work_items.append(work_dict)\n\n    training_init_info = {}\n    training_init_info[\"train_data\"] = df\n    training_init_info[\"folds_df\"] = folds_df\n    training_init_info[\"work_items\"] = work_items\n\n    # Save empty predictor (for metadata)\n    predictor.save(args.out_models_dir)\n\n    # Write training_init_info.\n    with open(join(args.out_models_dir, \"training_init_info.pkl\"), \"wb\") as fd:\n        pickle.dump(training_init_info, fd, protocol=pickle.HIGHEST_PROTOCOL)\n\n    print(\"Done initializing training.\")\n\n\ndef train_models(args):\n    global GLOBAL_DATA\n\n    print(\"Beginning training.\")\n    predictor = Class1ProcessingPredictor.load(args.out_models_dir)\n    print(\"Loaded predictor with %d networks\" % len(predictor.models))\n\n    with open(join(args.out_models_dir, \"training_init_info.pkl\"), \"rb\") as fd:\n        GLOBAL_DATA.update(pickle.load(fd))\n    print(\"Loaded training init info.\")\n\n    all_work_items = GLOBAL_DATA[\"work_items\"]\n    complete_work_item_names = [\n        network.fit_info[-1][\"training_info\"][\"work_item_name\"]\n        for network in predictor.models\n    ]\n    work_items = [\n        item for item in all_work_items\n        if item[\"work_item_name\"] not in complete_work_item_names\n    ]\n    print(\"Found %d work items, of which %d are incomplete and will run now.\" % (\n        len(all_work_items), len(work_items)))\n\n    serial_run = not args.cluster_parallelism and args.num_jobs == 0\n\n    # The estimated time to completion is more accurate if we randomize\n    # the order of the work.\n    random.shuffle(work_items)\n    for (work_item_num, item) in enumerate(work_items):\n        item['work_item_num'] = work_item_num\n        item['num_work_items'] = len(work_items)\n        item['progress_print_interval'] = 60.0 if not serial_run else 5.0\n        item['predictor'] = predictor if serial_run else None\n        item['save_to'] = args.out_models_dir if serial_run else None\n        item['verbose'] = args.verbosity\n\n    start = time.time()\n\n    worker_pool = None\n    if serial_run:\n        # Run in serial. Every worker is passed the same predictor,\n        # which it adds models to, so no merging is required. 
It also saves\n        # as it goes so no saving is required at the end.\n        print(\"Processing %d work items in serial.\" % len(work_items))\n        for _ in tqdm.trange(len(work_items)):\n            item = work_items.pop(0)  # want to keep freeing up memory\n            work_predictor = train_model(**item)\n            assert work_predictor is predictor\n            pprint.pprint(predictor.models[-1].fit_info[-1]['training_info'])\n        assert not work_items\n        results_generator = None\n    elif args.cluster_parallelism:\n        # Run using separate processes on an HPC cluster.\n        results_generator = cluster_results_from_args(\n            args,\n            work_function=train_model,\n            work_items=work_items,\n            constant_data=GLOBAL_DATA,\n            result_serialization_method=\"pickle\")\n    else:\n        worker_pool = worker_pool_with_gpu_assignments_from_args(args)\n        print(\"Worker pool\", worker_pool)\n        assert worker_pool is not None\n\n        print(\"Processing %d work items in parallel.\" % len(work_items))\n        assert not serial_run\n\n        for item in work_items:\n            item['constant_data'] = GLOBAL_DATA\n\n        results_generator = worker_pool.imap_unordered(\n            partial(call_wrapped_kwargs, train_model),\n            work_items,\n            chunksize=1)\n\n    if results_generator:\n        for new_predictor in tqdm.tqdm(results_generator, total=len(work_items)):\n            save_start = time.time()\n            (model,) = new_predictor.models\n            pprint.pprint(model.fit_info[-1]['training_info'])\n            (new_model_name,) = predictor.add_models(new_predictor.models)\n            predictor.save(\n                args.out_models_dir,\n                model_names_to_write=[new_model_name],\n                write_metadata=False)\n            print(\n                \"Saved predictor (%d models total) with 1 new model \"\n                \"in %0.2f sec to %s\" % (\n                    len(predictor.models),\n                    time.time() - save_start,\n                    args.out_models_dir))\n\n    predictor.save(args.out_models_dir)\n    print(\"Done saving.\")\n\n    print(\"*\" * 30)\n    training_time = time.time() - start\n    print(\"Trained processing predictor with %d networks in %0.2f min.\" % (\n        len(predictor.models), training_time / 60.0))\n    print(\"*\" * 30)\n\n    if worker_pool:\n        worker_pool.close()\n        worker_pool.join()\n\n    print(\"Predictor written to: %s\" % args.out_models_dir)\n\n\ndef train_model(\n        work_item_name,\n        work_item_num,\n        num_work_items,\n        architecture_num,\n        num_architectures,\n        fold_num,\n        num_folds,\n        replicate_num,\n        num_replicates,\n        hyperparameters,\n        verbose,\n        progress_print_interval,\n        predictor,\n        save_to,\n        constant_data=GLOBAL_DATA):\n\n    from sklearn.metrics import roc_auc_score\n    from mhcflurry.flanking_encoding import FlankingEncoding\n\n    df = constant_data[\"train_data\"]\n    folds_df = constant_data[\"folds_df\"]\n\n    if predictor is None:\n        predictor = Class1ProcessingPredictor(models=[])\n\n    numpy.testing.assert_equal(len(df), len(folds_df))\n\n    train_data = df.loc[\n        folds_df[\"fold_%d\" % fold_num]\n    ].sample(frac=1.0).copy()\n\n    test_data = df.loc[~folds_df[\"fold_%d\" % fold_num]].copy()\n\n    print(\"Training on %d points (%d points held-out).\" % (\n        
len(train_data), len(test_data)))\n\n    progress_preamble = (\n        \"[task %2d / %2d]: \"\n        \"[%2d / %2d folds] \"\n        \"[%2d / %2d architectures] \"\n        \"[%4d / %4d replicates] \" % (\n            work_item_num + 1,\n            num_work_items,\n            fold_num + 1,\n            num_folds,\n            architecture_num + 1,\n            num_architectures,\n            replicate_num + 1,\n            num_replicates))\n\n    print(\"%s [pid %d]. Hyperparameters:\" % (progress_preamble, os.getpid()))\n    pprint.pprint(hyperparameters)\n\n    model = Class1ProcessingNeuralNetwork(**hyperparameters)\n    model.fit(\n        sequences=FlankingEncoding(\n            peptides=train_data.peptide.values,\n            n_flanks=train_data.n_flank.values,\n            c_flanks=train_data.c_flank.values),\n        targets=train_data.hit.values,\n        progress_preamble=progress_preamble,\n        progress_print_interval=progress_print_interval,\n        verbose=verbose)\n\n    # Hash the training peptides for the model-specific training info.\n    train_peptide_hash = hashlib.sha1()\n    for peptide in sorted(train_data.peptide.values):\n        train_peptide_hash.update(peptide.encode())\n\n    # Compute AUC on train and held-out data so it can be logged.\n    for some_df in [train_data, test_data]:\n        some_df[\"prediction\"] = model.predict(\n            peptides=some_df.peptide.values,\n            n_flanks=some_df.n_flank.values,\n            c_flanks=some_df.c_flank.values)\n    train_auc = roc_auc_score(\n        train_data.hit.values, train_data.prediction.values)\n    test_auc = roc_auc_score(test_data.hit.values, test_data.prediction.values)\n    print(\"Train AUC\", train_auc)\n    print(\"Test AUC\", test_auc)\n\n    model.fit_info[-1].setdefault(\"training_info\", {}).update({\n        \"fold_num\": fold_num,\n        \"num_folds\": num_folds,\n        \"replicate_num\": replicate_num,\n        \"num_replicates\": num_replicates,\n        \"architecture_num\": architecture_num,\n        \"num_architectures\": num_architectures,\n        \"train_peptide_hash\": train_peptide_hash.hexdigest(),\n        \"work_item_name\": work_item_name,\n        \"train_auc\": train_auc,\n        \"test_auc\": test_auc,\n    })\n\n    numpy.testing.assert_equal(\n        predictor.manifest_df.shape[0], len(predictor.models))\n    predictor.add_models([model])\n    if save_to:\n        predictor.save(save_to)\n        print(\"Wrote\", save_to)\n    numpy.testing.assert_equal(\n        predictor.manifest_df.shape[0], len(predictor.models))\n\n    # Release the network to free memory.\n    model._network = None\n    return predictor\n\n\nif __name__ == '__main__':\n    run()\n"
  },
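  {
    "path": "examples/processing_predictor_usage_sketch.py",
    "content": "# Hypothetical usage sketch (assumed filename and models path; not part of\n# the original repository). It mirrors the Class1ProcessingPredictor.load()\n# and predict() calls used in the training script above: load a trained\n# processing predictor from a models directory and score peptides together\n# with their flanking sequences.\nfrom mhcflurry import Class1ProcessingPredictor\n\npredictor = Class1ProcessingPredictor.load(\"./models\")  # assumed path\n\n# One processing score per peptide. The n_flank/c_flank values are the\n# residues immediately upstream/downstream of each peptide in its source\n# protein, as in train_model() above.\nscores = predictor.predict(\n    peptides=[\"SIINFEKL\", \"NLVPMVATV\"],\n    n_flanks=[\"AAA\", \"MDS\"],\n    c_flanks=[\"CCC\", \"KGS\"])\nprint(scores)\n"
  },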
  {
    "path": "mhcflurry/version.py",
    "content": "__version__ = \"2.2.1\"\n"
  },
  {
    "path": "notebooks/example1.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {\n    \"collapsed\": true\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# Simple example for generating predictions with MHCflurry.\\n\",\n    \"import mhcflurry\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Using TensorFlow backend.\\n\",\n      \"WARNING: Logging before flag parsing goes to stderr.\\n\",\n      \"W0605 12:25:31.131366 4512298432 deprecation.py:506] From /Users/tim/miniconda3/envs/py36/lib/python3.6/site-packages/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\\n\",\n      \"Instructions for updating:\\n\",\n      \"If using Keras pass *_constraint arguments to layers.\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"<Class1PresentationPredictor at 0x148588128 [mhcflurry 1.7.0] generated on Fri May  1 11:08:06 2020>\"\n      ]\n     },\n     \"execution_count\": 2,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# Load a predictor\\n\",\n    \"predictor = mhcflurry.Class1PresentationPredictor.load()\\n\",\n    \"predictor\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\r\",\n      \"  0%|          | 0/1 [00:00<?, ?it/s]\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Predicting processing.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"W0605 12:25:34.682682 4512298432 module_wrapper.py:139] From /Users/tim/miniconda3/envs/py36/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:422: The name tf.global_variables is deprecated. 
Please use tf.compat.v1.global_variables instead.\\n\",\n      \"\\n\",\n      \"100%|██████████| 1/1 [00:05<00:00,  5.06s/it]\\n\",\n      \"  0%|          | 0/1 [00:00<?, ?it/s]\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Predicting affinities.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"100%|██████████| 1/1 [00:00<00:00,  2.09it/s]\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>peptide</th>\\n\",\n       \"      <th>peptide_num</th>\\n\",\n       \"      <th>sample_name</th>\\n\",\n       \"      <th>affinity</th>\\n\",\n       \"      <th>best_allele</th>\\n\",\n       \"      <th>processing_score</th>\\n\",\n       \"      <th>presentation_score</th>\\n\",\n       \"      <th>presentation_percentile</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>NLVPMVATV</td>\\n\",\n       \"      <td>0</td>\\n\",\n       \"      <td>sample1</td>\\n\",\n       \"      <td>23.634000</td>\\n\",\n       \"      <td>A*02:01</td>\\n\",\n       \"      <td>0.456810</td>\\n\",\n       \"      <td>0.990288</td>\\n\",\n       \"      <td>0.010220</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>KLLEIPDPDKNWATL</td>\\n\",\n       \"      <td>1</td>\\n\",\n       \"      <td>sample1</td>\\n\",\n       \"      <td>1539.483711</td>\\n\",\n       \"      <td>A*02:01</td>\\n\",\n       \"      <td>0.605072</td>\\n\",\n       \"      <td>0.538179</td>\\n\",\n       \"      <td>0.754209</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>2</th>\\n\",\n       \"      <td>RANDMPEPTIDE</td>\\n\",\n       \"      <td>2</td>\\n\",\n       \"      <td>sample1</td>\\n\",\n       \"      <td>18589.200876</td>\\n\",\n       \"      <td>A*02:01</td>\\n\",\n       \"      <td>0.006360</td>\\n\",\n       \"      <td>0.007214</td>\\n\",\n       \"      <td>59.896462</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"           peptide  peptide_num sample_name      affinity best_allele  \\\\\\n\",\n       \"0        NLVPMVATV            0     sample1     23.634000     A*02:01   \\n\",\n       \"1  KLLEIPDPDKNWATL            1     sample1   1539.483711     A*02:01   \\n\",\n       \"2     RANDMPEPTIDE            2     sample1  18589.200876     A*02:01   \\n\",\n       \"\\n\",\n       \"   processing_score  presentation_score  presentation_percentile  \\n\",\n       \"0          0.456810            0.990288                 0.010220  \\n\",\n       \"1          0.605072            0.538179                 
0.754209  \\n\",\n       \"2          0.006360            0.007214                59.896462  \"\n      ]\n     },\n     \"execution_count\": 3,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# Predict for individual peptides.\\n\",\n    \"# Try help(predictor.predict) for other options.\\n\",\n    \"results1 = predictor.predict([\\\"NLVPMVATV\\\", \\\"KLLEIPDPDKNWATL\\\", \\\"RANDMPEPTIDE\\\"], [\\\"A*02:01\\\"])\\n\",\n    \"results1\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 4,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"\\r\",\n      \"  0%|          | 0/1 [00:00<?, ?it/s]\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Predicting processing.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"100%|██████████| 1/1 [00:08<00:00,  8.92s/it]\\n\",\n      \" 40%|████      | 2/5 [00:00<00:00, 18.32it/s]\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Predicting affinities.\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"100%|██████████| 5/5 [00:00<00:00, 19.89it/s]\\n\"\n     ]\n    },\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>sequence_name</th>\\n\",\n       \"      <th>pos</th>\\n\",\n       \"      <th>peptide</th>\\n\",\n       \"      <th>n_flank</th>\\n\",\n       \"      <th>c_flank</th>\\n\",\n       \"      <th>sample_name</th>\\n\",\n       \"      <th>affinity</th>\\n\",\n       \"      <th>best_allele</th>\\n\",\n       \"      <th>affinity_percentile</th>\\n\",\n       \"      <th>processing_score</th>\\n\",\n       \"      <th>presentation_score</th>\\n\",\n       \"      <th>presentation_percentile</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>protein1</td>\\n\",\n       \"      <td>14</td>\\n\",\n       \"      <td>LLLVVSNLL</td>\\n\",\n       \"      <td>MDSKGSSQKGSRLL</td>\\n\",\n       \"      <td></td>\\n\",\n       \"      <td>sample1</td>\\n\",\n       \"      <td>119.641959</td>\\n\",\n       \"      <td>A0201</td>\\n\",\n       \"      <td>0.428125</td>\\n\",\n       \"      <td>0.103990</td>\\n\",\n       \"      <td>0.790948</td>\\n\",\n       \"      <td>0.328758</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>protein1</td>\\n\",\n       \"      <td>13</td>\\n\",\n       \"      <td>LLLLVVSNL</td>\\n\",\n       \"      <td>MDSKGSSQKGSRL</td>\\n\",\n       \"      <td>L</td>\\n\",\n
      \"      <td>sample1</td>\\n\",\n       \"      <td>165.667454</td>\\n\",\n       \"      <td>A0201</td>\\n\",\n       \"      <td>0.513375</td>\\n\",\n       \"      <td>0.028328</td>\\n\",\n       \"      <td>0.656283</td>\\n\",\n       \"      <td>0.540264</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"  sequence_name  pos    peptide         n_flank c_flank sample_name  \\\\\\n\",\n       \"0      protein1   14  LLLVVSNLL  MDSKGSSQKGSRLL             sample1   \\n\",\n       \"1      protein1   13  LLLLVVSNL   MDSKGSSQKGSRL       L     sample1   \\n\",\n       \"\\n\",\n       \"     affinity best_allele  affinity_percentile  processing_score  \\\\\\n\",\n       \"0  119.641959       A0201             0.428125          0.103990   \\n\",\n       \"1  165.667454       A0201             0.513375          0.028328   \\n\",\n       \"\\n\",\n       \"   presentation_score  presentation_percentile  \\n\",\n       \"0            0.790948                 0.328758  \\n\",\n       \"1            0.656283                 0.540264  \"\n      ]\n     },\n     \"execution_count\": 4,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# Predict across protein sequences\\n\",\n    \"# Try help(predictor.predict_sequences) for other options.\\n\",\n    \"results2 = predictor.predict_sequences(\\n\",\n    \"    sequences={\\n\",\n    \"        'protein1': \\\"MDSKGSSQKGSRLLLLLVVSNLL\\\",\\n\",\n    \"        'protein2': \\\"SSLPTPEDKEQAQQTHH\\\",\\n\",\n    \"    },\\n\",\n    \"    alleles={\\n\",\n    \"        \\\"sample1\\\": [\\\"A0201\\\", \\\"A0301\\\", \\\"B0702\\\"],\\n\",\n    \"        \\\"sample2\\\": [\\\"A0101\\\", \\\"C0202\\\"],\\n\",\n    \"    },\\n\",\n    \"    result=\\\"filtered\\\",\\n\",\n    \"    comparison_quantity=\\\"affinity\\\",\\n\",\n    \"    filter_value=500)\\n\",\n    \"results2\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python 3\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.6.1\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 4\n}\n"
  },
  {
    "path": "notebooks/mhcflurry-colab.ipynb",
    "content": "{\n  \"cells\": [\n    {\n      \"cell_type\": \"markdown\",\n      \"source\": [\n        \"# Setup\"\n      ],\n      \"metadata\": {\n        \"id\": \"ZBP3mFgsZTxA\"\n      }\n    },\n    {\n      \"cell_type\": \"markdown\",\n      \"source\": [\n        \"This notebook demonstrates how to generate predictions using MHCflurry.\"\n      ],\n      \"metadata\": {\n        \"id\": \"c4ukEYkrco5H\"\n      }\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# Install the package and download models\\n\",\n        \"!pip install -q mhcflurry\\n\",\n        \"!mhcflurry-downloads --quiet fetch models_class1_presentation\"\n      ],\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\"\n        },\n        \"id\": \"uUFQLxFgZTAO\",\n        \"outputId\": \"66ad515b-81bb-46ea-9760-58939109f73a\"\n      },\n      \"execution_count\": 1,\n      \"outputs\": [\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"\\u001b[K     |████████████████████████████████| 140 kB 5.0 MB/s \\n\",\n            \"\\u001b[K     |████████████████████████████████| 103 kB 7.7 MB/s \\n\",\n            \"\\u001b[K     |████████████████████████████████| 61 kB 275 kB/s \\n\",\n            \"\\u001b[K     |████████████████████████████████| 636 kB 9.9 MB/s \\n\",\n            \"\\u001b[K     |████████████████████████████████| 130 kB 12.9 MB/s \\n\",\n            \"\\u001b[?25h  Building wheel for np-utils (setup.py) ... \\u001b[?25l\\u001b[?25hdone\\n\",\n            \"  Building wheel for serializable (setup.py) ... \\u001b[?25l\\u001b[?25hdone\\n\",\n            \"  Building wheel for typechecks (setup.py) ... \\u001b[?25l\\u001b[?25hdone\\n\",\n            \"135MB [00:01, 73.1MB/s]               \\n\",\n            \"Extracting: 100% 62/62 [00:15<00:00,  3.89it/s]\\n\"\n          ]\n        }\n      ]\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# Imports\\n\",\n        \"import mhcflurry\\n\",\n        \"from google.colab import files\\n\",\n        \"\\n\",\n        \"# Quiet warnings\\n\",\n        \"import warnings\\n\",\n        \"warnings.filterwarnings('ignore')\"\n      ],\n      \"metadata\": {\n        \"id\": \"1sqAFdItWwd5\"\n      },\n      \"execution_count\": 2,\n      \"outputs\": []\n    },\n    {\n      \"cell_type\": \"code\",\n      \"execution_count\": 3,\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\"\n        },\n        \"id\": \"sHCsU4dNRGBk\",\n        \"outputId\": \"9fd7fec5-ce28-416f-b9b1-aeb4062c79a3\"\n      },\n      \"outputs\": [\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"Forcing tensorflow backend.\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stderr\",\n          \"text\": [\n            \"WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/tensorflow/python/compat/v2_compat.py:107: disable_resource_variables (from tensorflow.python.ops.variable_scope) is deprecated and will be removed in a future version.\\n\",\n            \"Instructions for updating:\\n\",\n            \"non-resource variables are not supported in the long term\\n\",\n            \"WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/mhcflurry/common.py:131: The name tf.keras.backend.set_session is deprecated. 
Please use tf.compat.v1.keras.backend.set_session instead.\\n\",\n            \"\\n\",\n            \"WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/keras/initializers/initializers_v1.py:278: calling RandomUniform.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\\n\",\n            \"Instructions for updating:\\n\",\n            \"Call initializer instance with the dtype argument instead of passing it to the constructor\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"execute_result\",\n          \"data\": {\n            \"text/plain\": [\n              \"<Class1PresentationPredictor at 0x7fea8ee35090 [mhcflurry 2.0.6] generated on Thu Jun 11 13:37:18 2020>\"\n            ]\n          },\n          \"metadata\": {},\n          \"execution_count\": 3\n        }\n      ],\n      \"source\": [\n        \"# Load a predictor\\n\",\n        \"predictor = mhcflurry.Class1PresentationPredictor.load()\\n\",\n        \"predictor\"\n      ]\n    },\n    {\n      \"cell_type\": \"markdown\",\n      \"source\": [\n        \"# Predict for specified peptides\"\n      ],\n      \"metadata\": {\n        \"id\": \"SZyoFee7ZlaH\"\n      }\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"peptides = \\\"\\\"\\\"\\n\",\n        \"NLVPMVATV\\n\",\n        \"RANDMPEPTIDE\\n\",\n        \"SIINFEKL\\n\",\n        \"\\\"\\\"\\\".split()\\n\",\n        \"\\n\",\n        \"alleles = \\\"A*02:01 B*27:01 H2-Kb\\\".split()\\n\",\n        \"\\n\",\n        \"results1 = predictor.predict(peptides, alleles)\\n\",\n        \"results1\"\n      ],\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\",\n          \"height\": 342\n        },\n        \"id\": \"LaoQdnXlZLo8\",\n        \"outputId\": \"755bd5b8-6dd8-4532-8781-395552be82e0\"\n      },\n      \"execution_count\": 4,\n      \"outputs\": [\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"Predicting processing.\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stderr\",\n          \"text\": [\n            \"100%|██████████| 1/1 [00:05<00:00,  5.26s/it]\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"Predicting affinities.\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stderr\",\n          \"text\": [\n            \"\\r  0%|          | 0/3 [00:00<?, ?it/s]WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/init_ops.py:93: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\\n\",\n            \"Instructions for updating:\\n\",\n            \"Call initializer instance with the dtype argument instead of passing it to the constructor\\n\",\n            \"WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/init_ops.py:93: calling Zeros.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\\n\",\n            \"Instructions for updating:\\n\",\n            \"Call initializer instance with the dtype argument instead of passing it to the constructor\\n\",\n            \"100%|██████████| 3/3 [00:09<00:00,  
3.30s/it]\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"execute_result\",\n          \"data\": {\n            \"text/plain\": [\n              \"        peptide  peptide_num sample_name      affinity best_allele  \\\\\\n\",\n              \"0     NLVPMVATV            0     sample1     16.570972     A*02:01   \\n\",\n              \"1  RANDMPEPTIDE            1     sample1  21780.313255     B*27:01   \\n\",\n              \"2      SIINFEKL            2     sample1     19.707210       H2-Kb   \\n\",\n              \"\\n\",\n              \"   processing_score  presentation_score  presentation_percentile  \\n\",\n              \"0          0.533008            0.970187                 0.018723  \\n\",\n              \"1          0.008492            0.004732                62.744674  \\n\",\n              \"2          0.264710            0.914111                 0.099511  \"\n            ],\n            \"text/html\": [\n              \"\\n\",\n              \"  <div id=\\\"df-3bcfc280-e3d4-4912-ab50-d4e8595b95b5\\\">\\n\",\n              \"    <div class=\\\"colab-df-container\\\">\\n\",\n              \"      <div>\\n\",\n              \"<style scoped>\\n\",\n              \"    .dataframe tbody tr th:only-of-type {\\n\",\n              \"        vertical-align: middle;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .dataframe tbody tr th {\\n\",\n              \"        vertical-align: top;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .dataframe thead th {\\n\",\n              \"        text-align: right;\\n\",\n              \"    }\\n\",\n              \"</style>\\n\",\n              \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n              \"  <thead>\\n\",\n              \"    <tr style=\\\"text-align: right;\\\">\\n\",\n              \"      <th></th>\\n\",\n              \"      <th>peptide</th>\\n\",\n              \"      <th>peptide_num</th>\\n\",\n              \"      <th>sample_name</th>\\n\",\n              \"      <th>affinity</th>\\n\",\n              \"      <th>best_allele</th>\\n\",\n              \"      <th>processing_score</th>\\n\",\n              \"      <th>presentation_score</th>\\n\",\n              \"      <th>presentation_percentile</th>\\n\",\n              \"    </tr>\\n\",\n              \"  </thead>\\n\",\n              \"  <tbody>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>0</th>\\n\",\n              \"      <td>NLVPMVATV</td>\\n\",\n              \"      <td>0</td>\\n\",\n              \"      <td>sample1</td>\\n\",\n              \"      <td>16.570972</td>\\n\",\n              \"      <td>A*02:01</td>\\n\",\n              \"      <td>0.533008</td>\\n\",\n              \"      <td>0.970187</td>\\n\",\n              \"      <td>0.018723</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>1</th>\\n\",\n              \"      <td>RANDMPEPTIDE</td>\\n\",\n              \"      <td>1</td>\\n\",\n              \"      <td>sample1</td>\\n\",\n              \"      <td>21780.313255</td>\\n\",\n              \"      <td>B*27:01</td>\\n\",\n              \"      <td>0.008492</td>\\n\",\n              \"      <td>0.004732</td>\\n\",\n              \"      <td>62.744674</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>2</th>\\n\",\n              \"      <td>SIINFEKL</td>\\n\",\n              \"      <td>2</td>\\n\",\n              \"      
<td>sample1</td>\\n\",\n              \"      <td>19.707210</td>\\n\",\n              \"      <td>H2-Kb</td>\\n\",\n              \"      <td>0.264710</td>\\n\",\n              \"      <td>0.914111</td>\\n\",\n              \"      <td>0.099511</td>\\n\",\n              \"    </tr>\\n\",\n              \"  </tbody>\\n\",\n              \"</table>\\n\",\n              \"</div>\\n\",\n              \"      <button class=\\\"colab-df-convert\\\" onclick=\\\"convertToInteractive('df-3bcfc280-e3d4-4912-ab50-d4e8595b95b5')\\\"\\n\",\n              \"              title=\\\"Convert this dataframe to an interactive table.\\\"\\n\",\n              \"              style=\\\"display:none;\\\">\\n\",\n              \"        \\n\",\n              \"  <svg xmlns=\\\"http://www.w3.org/2000/svg\\\" height=\\\"24px\\\"viewBox=\\\"0 0 24 24\\\"\\n\",\n              \"       width=\\\"24px\\\">\\n\",\n              \"    <path d=\\\"M0 0h24v24H0V0z\\\" fill=\\\"none\\\"/>\\n\",\n              \"    <path d=\\\"M18.56 5.44l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94zm-11 1L8.5 8.5l.94-2.06 2.06-.94-2.06-.94L8.5 2.5l-.94 2.06-2.06.94zm10 10l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94z\\\"/><path d=\\\"M17.41 7.96l-1.37-1.37c-.4-.4-.92-.59-1.43-.59-.52 0-1.04.2-1.43.59L10.3 9.45l-7.72 7.72c-.78.78-.78 2.05 0 2.83L4 21.41c.39.39.9.59 1.41.59.51 0 1.02-.2 1.41-.59l7.78-7.78 2.81-2.81c.8-.78.8-2.07 0-2.86zM5.41 20L4 18.59l7.72-7.72 1.47 1.35L5.41 20z\\\"/>\\n\",\n              \"  </svg>\\n\",\n              \"      </button>\\n\",\n              \"      \\n\",\n              \"  <style>\\n\",\n              \"    .colab-df-container {\\n\",\n              \"      display:flex;\\n\",\n              \"      flex-wrap:wrap;\\n\",\n              \"      gap: 12px;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .colab-df-convert {\\n\",\n              \"      background-color: #E8F0FE;\\n\",\n              \"      border: none;\\n\",\n              \"      border-radius: 50%;\\n\",\n              \"      cursor: pointer;\\n\",\n              \"      display: none;\\n\",\n              \"      fill: #1967D2;\\n\",\n              \"      height: 32px;\\n\",\n              \"      padding: 0 0 0 0;\\n\",\n              \"      width: 32px;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .colab-df-convert:hover {\\n\",\n              \"      background-color: #E2EBFA;\\n\",\n              \"      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\\n\",\n              \"      fill: #174EA6;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    [theme=dark] .colab-df-convert {\\n\",\n              \"      background-color: #3B4455;\\n\",\n              \"      fill: #D2E3FC;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    [theme=dark] .colab-df-convert:hover {\\n\",\n              \"      background-color: #434B5C;\\n\",\n              \"      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\\n\",\n              \"      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\\n\",\n              \"      fill: #FFFFFF;\\n\",\n              \"    }\\n\",\n              \"  </style>\\n\",\n              \"\\n\",\n              \"      <script>\\n\",\n              \"        const buttonEl =\\n\",\n              \"          document.querySelector('#df-3bcfc280-e3d4-4912-ab50-d4e8595b95b5 button.colab-df-convert');\\n\",\n              \"    
    buttonEl.style.display =\\n\",\n              \"          google.colab.kernel.accessAllowed ? 'block' : 'none';\\n\",\n              \"\\n\",\n              \"        async function convertToInteractive(key) {\\n\",\n              \"          const element = document.querySelector('#df-3bcfc280-e3d4-4912-ab50-d4e8595b95b5');\\n\",\n              \"          const dataTable =\\n\",\n              \"            await google.colab.kernel.invokeFunction('convertToInteractive',\\n\",\n              \"                                                     [key], {});\\n\",\n              \"          if (!dataTable) return;\\n\",\n              \"\\n\",\n              \"          const docLinkHtml = 'Like what you see? Visit the ' +\\n\",\n              \"            '<a target=\\\"_blank\\\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\\n\",\n              \"            + ' to learn more about interactive tables.';\\n\",\n              \"          element.innerHTML = '';\\n\",\n              \"          dataTable['output_type'] = 'display_data';\\n\",\n              \"          await google.colab.output.renderOutput(dataTable, element);\\n\",\n              \"          const docLink = document.createElement('div');\\n\",\n              \"          docLink.innerHTML = docLinkHtml;\\n\",\n              \"          element.appendChild(docLink);\\n\",\n              \"        }\\n\",\n              \"      </script>\\n\",\n              \"    </div>\\n\",\n              \"  </div>\\n\",\n              \"  \"\n            ]\n          },\n          \"metadata\": {},\n          \"execution_count\": 4\n        }\n      ]\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# Download results\\n\",\n        \"results1.to_csv('mhcflurry-results.csv')\\n\",\n        \"files.download('mhcflurry-results.csv')\"\n      ],\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\",\n          \"height\": 17\n        },\n        \"id\": \"B2YXOk6waeli\",\n        \"outputId\": \"c933a1b7-65c5-44da-dacc-8ab3813de681\"\n      },\n      \"execution_count\": 5,\n      \"outputs\": [\n        {\n          \"output_type\": \"display_data\",\n          \"data\": {\n            \"text/plain\": [\n              \"<IPython.core.display.Javascript object>\"\n            ],\n            \"application/javascript\": [\n              \"\\n\",\n              \"    async function download(id, filename, size) {\\n\",\n              \"      if (!google.colab.kernel.accessAllowed) {\\n\",\n              \"        return;\\n\",\n              \"      }\\n\",\n              \"      const div = document.createElement('div');\\n\",\n              \"      const label = document.createElement('label');\\n\",\n              \"      label.textContent = `Downloading \\\"${filename}\\\": `;\\n\",\n              \"      div.appendChild(label);\\n\",\n              \"      const progress = document.createElement('progress');\\n\",\n              \"      progress.max = size;\\n\",\n              \"      div.appendChild(progress);\\n\",\n              \"      document.body.appendChild(div);\\n\",\n              \"\\n\",\n              \"      const buffers = [];\\n\",\n              \"      let downloaded = 0;\\n\",\n              \"\\n\",\n              \"      const channel = await google.colab.kernel.comms.open(id);\\n\",\n              \"      // Send a message to notify the kernel that we're ready.\\n\",\n              \"      
channel.send({})\\n\",\n              \"\\n\",\n              \"      for await (const message of channel.messages) {\\n\",\n              \"        // Send a message to notify the kernel that we're ready.\\n\",\n              \"        channel.send({})\\n\",\n              \"        if (message.buffers) {\\n\",\n              \"          for (const buffer of message.buffers) {\\n\",\n              \"            buffers.push(buffer);\\n\",\n              \"            downloaded += buffer.byteLength;\\n\",\n              \"            progress.value = downloaded;\\n\",\n              \"          }\\n\",\n              \"        }\\n\",\n              \"      }\\n\",\n              \"      const blob = new Blob(buffers, {type: 'application/binary'});\\n\",\n              \"      const a = document.createElement('a');\\n\",\n              \"      a.href = window.URL.createObjectURL(blob);\\n\",\n              \"      a.download = filename;\\n\",\n              \"      div.appendChild(a);\\n\",\n              \"      a.click();\\n\",\n              \"      div.remove();\\n\",\n              \"    }\\n\",\n              \"  \"\n            ]\n          },\n          \"metadata\": {}\n        },\n        {\n          \"output_type\": \"display_data\",\n          \"data\": {\n            \"text/plain\": [\n              \"<IPython.core.display.Javascript object>\"\n            ],\n            \"application/javascript\": [\n              \"download(\\\"download_044dbb46-ccba-49fe-876b-f71d363c8833\\\", \\\"mhcflurry-results.csv\\\", 434)\"\n            ]\n          },\n          \"metadata\": {}\n        }\n      ]\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# See help for more options:\\n\",\n        \"help(predictor.predict)\"\n      ],\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\"\n        },\n        \"id\": \"OIYIyhiudmis\",\n        \"outputId\": \"d438606b-e747-4f28-9bdc-bb41733746bb\"\n      },\n      \"execution_count\": 6,\n      \"outputs\": [\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"Help on method predict in module mhcflurry.class1_presentation_predictor:\\n\",\n            \"\\n\",\n            \"predict(peptides, alleles, sample_names=None, n_flanks=None, c_flanks=None, include_affinity_percentile=False, verbose=1, throw=True) method of mhcflurry.class1_presentation_predictor.Class1PresentationPredictor instance\\n\",\n            \"    Predict presentation scores across a set of peptides.\\n\",\n            \"    \\n\",\n            \"    Presentation scores combine predictions for MHC I binding affinity\\n\",\n            \"    and antigen processing.\\n\",\n            \"    \\n\",\n            \"    This method returns a pandas.DataFrame giving presentation scores plus\\n\",\n            \"    the binding affinity and processing predictions and other intermediate\\n\",\n            \"    results.\\n\",\n            \"    \\n\",\n            \"    Example:\\n\",\n            \"    \\n\",\n            \"    >>> predictor = Class1PresentationPredictor.load()\\n\",\n            \"    >>> predictor.predict(\\n\",\n            \"    ...    peptides=[\\\"SIINFEKL\\\", \\\"PEPTIDE\\\"],\\n\",\n            \"    ...    n_flanks=[\\\"NNN\\\", \\\"SNS\\\"],\\n\",\n            \"    ...    c_flanks=[\\\"CCC\\\", \\\"CNC\\\"],\\n\",\n            \"    ...    alleles={\\n\",\n            \"    ...        
\\\"sample1\\\": [\\\"A0201\\\", \\\"A0301\\\", \\\"B0702\\\"],\\n\",\n            \"    ...        \\\"sample2\\\": [\\\"A0101\\\", \\\"C0202\\\"],\\n\",\n            \"    ...    },\\n\",\n            \"    ...    verbose=0)\\n\",\n            \"        peptide n_flank c_flank  peptide_num sample_name   affinity best_allele  processing_score  presentation_score  presentation_percentile\\n\",\n            \"    0  SIINFEKL     NNN     CCC            0     sample1  11927.161       A0201             0.838               0.145                    2.282\\n\",\n            \"    1   PEPTIDE     SNS     CNC            1     sample1  32507.082       A0201             0.025               0.003                  100.000\\n\",\n            \"    2  SIINFEKL     NNN     CCC            0     sample2   2725.593       C0202             0.838               0.416                    1.017\\n\",\n            \"    3   PEPTIDE     SNS     CNC            1     sample2  28304.338       C0202             0.025               0.003                   99.287\\n\",\n            \"    \\n\",\n            \"    You can also specify sample_names, in which case peptide is evaluated\\n\",\n            \"    for binding the alleles in the corresponding sample only. See\\n\",\n            \"    `predict_affinity` for an examples.\\n\",\n            \"    \\n\",\n            \"    Parameters\\n\",\n            \"    ----------\\n\",\n            \"    peptides : list of string\\n\",\n            \"        Peptide sequences\\n\",\n            \"    alleles : list of string or dict of string -> list of string\\n\",\n            \"        If you are predicting for a single sample, pass a list of strings\\n\",\n            \"        (up to 6) indicating the genotype. If you are predicting across\\n\",\n            \"        multiple samples, pass a dict where the keys are (arbitrary)\\n\",\n            \"        sample names and the values are the alleles to predict for that\\n\",\n            \"        sample. Set to an empty list or dict to perform processing\\n\",\n            \"        prediction only.\\n\",\n            \"    sample_names : list of string [same length as peptides]\\n\",\n            \"        If you are passing a dict for 'alleles', you can use this\\n\",\n            \"        argument to specify which peptides go with which samples. If it is\\n\",\n            \"        None, then predictions will be performed for each peptide across all\\n\",\n            \"        samples.\\n\",\n            \"    n_flanks : list of string [same length as peptides]\\n\",\n            \"        Upstream sequences before the peptide. Sequences of any length can\\n\",\n            \"        be given and a suffix of the size supported by the model will be\\n\",\n            \"        used.\\n\",\n            \"    c_flanks : list of string [same length as peptides]\\n\",\n            \"        Downstream sequences after the peptide. Sequences of any length can\\n\",\n            \"        be given and a prefix of the size supported by the model will be\\n\",\n            \"        used.\\n\",\n            \"    include_affinity_percentile : bool\\n\",\n            \"        Whether to include affinity percentile ranks\\n\",\n            \"    verbose : int\\n\",\n            \"        Set to 0 for quiet.\\n\",\n            \"    throw : verbose\\n\",\n            \"        Whether to throw exception (vs. 
just log a warning) on invalid\\n\",\n            \"        peptides, etc.\\n\",\n            \"    \\n\",\n            \"    Returns\\n\",\n            \"    -------\\n\",\n            \"    pandas.DataFrame\\n\",\n            \"    \\n\",\n            \"    Presentation scores and intermediate results.\\n\",\n            \"\\n\"\n          ]\n        }\n      ]\n    },\n    {\n      \"cell_type\": \"markdown\",\n      \"source\": [\n        \"# Predict by scanning across protein sequences\"\n      ],\n      \"metadata\": {\n        \"id\": \"TAJlyQ-4axay\"\n      }\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# Paste your fasta here\\n\",\n        \"proteins_fasta = \\\"\\\"\\\"\\n\",\n        \">tr|A0A6B9WFC7|A0A6B9WFC7_SARS2 Envelope small membrane protein\\n\",\n        \"MYSFVSEETGTLIVNSVLLFLAFVVFLLVTLAILTALRLCAYCCNIVNVSLVKPSFYVYS\\n\",\n        \"RVKNLNSSRVPDLLV\\n\",\n        \">tr|A0A6B9W0L4|A0A6B9W0L4_SARS2 ORF6 protein\\n\",\n        \"MFHLVDFQVTIAEILLIIMRTFKVSIWNLDYIINLIIKNLSKSLTENKYSQLDEEQPMEI\\n\",\n        \"D\\n\",\n        \">tr|A0A6G7S6S0|A0A6G7S6S0_SARS2 Nonstructural protein NS3\\n\",\n        \"MDLFMRIFTIGTVTLKQGEIKDATPSDFVRATATIPIQASLPFGWLIVGVALLAVFQSAS\\n\",\n        \"KIITLKKRWQLALSKGVHFVCNLLLLFVTVYSHLLLVAAGLEAPFLYLYALVYFLQSINF\\n\",\n        \"VRIIMRLWLCWKCRSKNPLLYDANYFLCWHTNCYDYCIPYNSVTSSIVITSGDGTTSPIS\\n\",\n        \"EHDYQIGGYTEKWESGVKDCVVLHSYFTSDYYQLYSTQLSTDTGVEHVTFFIYNKIVDEP\\n\",\n        \"EEHVQIHTIDGSSGVVNPVMEPIYDEPTTTTSVPL\\n\",\n        \">tr|A0A6B9VLF3|A0A6B9VLF3_SARS2 Membrane protein\\n\",\n        \"MADSNGTITVEELKKLLEQWNLVIGFLFLTWICLLQFAYANRNRFLYIIKLIFLWLLWPV\\n\",\n        \"TLACFVLAAVYRINWITGGIAIAMACLVGLMWLSYFIASFRLFARTRSMWSFNPETNILL\\n\",\n        \"NVPLHGTILTRPLLESELVIGAVILRGHLRIAGHHLGRCDIKDLPKEITVATSRTLSYYK\\n\",\n        \"LGASQRVAGDSGFAAYSRYRIGNYKLNTDHSSSSDNIALLVQ\\n\",\n        \"\\\"\\\"\\\"\\n\",\n        \"\\n\",\n        \"import mhcflurry.fasta\\n\",\n        \"\\n\",\n        \"with open(\\\"temp.fa\\\", \\\"w\\\") as fd:\\n\",\n        \"    fd.write(proteins_fasta)\\n\",\n        \"\\n\",\n        \"proteins = mhcflurry.fasta.read_fasta_to_dataframe(\\\"temp.fa\\\").set_index(\\\"sequence_id\\\")\\n\",\n        \"proteins\"\n      ],\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\",\n          \"height\": 206\n        },\n        \"id\": \"IORcEbHkbBsJ\",\n        \"outputId\": \"db4bb0bd-0602-4452-dd4e-55dbfa7cb90d\"\n      },\n      \"execution_count\": 7,\n      \"outputs\": [\n        {\n          \"output_type\": \"execute_result\",\n          \"data\": {\n            \"text/plain\": [\n              \"                                                                         sequence\\n\",\n              \"sequence_id                                                                      \\n\",\n              \"tr|A0A6B9WFC7|A0A6B9WFC7_SARS2  MYSFVSEETGTLIVNSVLLFLAFVVFLLVTLAILTALRLCAYCCNI...\\n\",\n              \"tr|A0A6B9W0L4|A0A6B9W0L4_SARS2  MFHLVDFQVTIAEILLIIMRTFKVSIWNLDYIINLIIKNLSKSLTE...\\n\",\n              \"tr|A0A6G7S6S0|A0A6G7S6S0_SARS2  MDLFMRIFTIGTVTLKQGEIKDATPSDFVRATATIPIQASLPFGWL...\\n\",\n              \"tr|A0A6B9VLF3|A0A6B9VLF3_SARS2  MADSNGTITVEELKKLLEQWNLVIGFLFLTWICLLQFAYANRNRFL...\"\n            ],\n            \"text/html\": [\n              \"\\n\",\n              \"  <div id=\\\"df-62abe4e0-7cd3-4f1e-84eb-93cae45f41a3\\\">\\n\",\n              \"    <div class=\\\"colab-df-container\\\">\\n\",\n              \"      <div>\\n\",\n 
             \"<style scoped>\\n\",\n              \"    .dataframe tbody tr th:only-of-type {\\n\",\n              \"        vertical-align: middle;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .dataframe tbody tr th {\\n\",\n              \"        vertical-align: top;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .dataframe thead th {\\n\",\n              \"        text-align: right;\\n\",\n              \"    }\\n\",\n              \"</style>\\n\",\n              \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n              \"  <thead>\\n\",\n              \"    <tr style=\\\"text-align: right;\\\">\\n\",\n              \"      <th></th>\\n\",\n              \"      <th>sequence</th>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>sequence_id</th>\\n\",\n              \"      <th></th>\\n\",\n              \"    </tr>\\n\",\n              \"  </thead>\\n\",\n              \"  <tbody>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>tr|A0A6B9WFC7|A0A6B9WFC7_SARS2</th>\\n\",\n              \"      <td>MYSFVSEETGTLIVNSVLLFLAFVVFLLVTLAILTALRLCAYCCNI...</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>tr|A0A6B9W0L4|A0A6B9W0L4_SARS2</th>\\n\",\n              \"      <td>MFHLVDFQVTIAEILLIIMRTFKVSIWNLDYIINLIIKNLSKSLTE...</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>tr|A0A6G7S6S0|A0A6G7S6S0_SARS2</th>\\n\",\n              \"      <td>MDLFMRIFTIGTVTLKQGEIKDATPSDFVRATATIPIQASLPFGWL...</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>tr|A0A6B9VLF3|A0A6B9VLF3_SARS2</th>\\n\",\n              \"      <td>MADSNGTITVEELKKLLEQWNLVIGFLFLTWICLLQFAYANRNRFL...</td>\\n\",\n              \"    </tr>\\n\",\n              \"  </tbody>\\n\",\n              \"</table>\\n\",\n              \"</div>\\n\",\n              \"      <button class=\\\"colab-df-convert\\\" onclick=\\\"convertToInteractive('df-62abe4e0-7cd3-4f1e-84eb-93cae45f41a3')\\\"\\n\",\n              \"              title=\\\"Convert this dataframe to an interactive table.\\\"\\n\",\n              \"              style=\\\"display:none;\\\">\\n\",\n              \"        \\n\",\n              \"  <svg xmlns=\\\"http://www.w3.org/2000/svg\\\" height=\\\"24px\\\"viewBox=\\\"0 0 24 24\\\"\\n\",\n              \"       width=\\\"24px\\\">\\n\",\n              \"    <path d=\\\"M0 0h24v24H0V0z\\\" fill=\\\"none\\\"/>\\n\",\n              \"    <path d=\\\"M18.56 5.44l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94zm-11 1L8.5 8.5l.94-2.06 2.06-.94-2.06-.94L8.5 2.5l-.94 2.06-2.06.94zm10 10l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94z\\\"/><path d=\\\"M17.41 7.96l-1.37-1.37c-.4-.4-.92-.59-1.43-.59-.52 0-1.04.2-1.43.59L10.3 9.45l-7.72 7.72c-.78.78-.78 2.05 0 2.83L4 21.41c.39.39.9.59 1.41.59.51 0 1.02-.2 1.41-.59l7.78-7.78 2.81-2.81c.8-.78.8-2.07 0-2.86zM5.41 20L4 18.59l7.72-7.72 1.47 1.35L5.41 20z\\\"/>\\n\",\n              \"  </svg>\\n\",\n              \"      </button>\\n\",\n              \"      \\n\",\n              \"  <style>\\n\",\n              \"    .colab-df-container {\\n\",\n              \"      display:flex;\\n\",\n              \"      flex-wrap:wrap;\\n\",\n              \"      gap: 12px;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .colab-df-convert {\\n\",\n           
   \"      background-color: #E8F0FE;\\n\",\n              \"      border: none;\\n\",\n              \"      border-radius: 50%;\\n\",\n              \"      cursor: pointer;\\n\",\n              \"      display: none;\\n\",\n              \"      fill: #1967D2;\\n\",\n              \"      height: 32px;\\n\",\n              \"      padding: 0 0 0 0;\\n\",\n              \"      width: 32px;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .colab-df-convert:hover {\\n\",\n              \"      background-color: #E2EBFA;\\n\",\n              \"      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\\n\",\n              \"      fill: #174EA6;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    [theme=dark] .colab-df-convert {\\n\",\n              \"      background-color: #3B4455;\\n\",\n              \"      fill: #D2E3FC;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    [theme=dark] .colab-df-convert:hover {\\n\",\n              \"      background-color: #434B5C;\\n\",\n              \"      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\\n\",\n              \"      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\\n\",\n              \"      fill: #FFFFFF;\\n\",\n              \"    }\\n\",\n              \"  </style>\\n\",\n              \"\\n\",\n              \"      <script>\\n\",\n              \"        const buttonEl =\\n\",\n              \"          document.querySelector('#df-62abe4e0-7cd3-4f1e-84eb-93cae45f41a3 button.colab-df-convert');\\n\",\n              \"        buttonEl.style.display =\\n\",\n              \"          google.colab.kernel.accessAllowed ? 'block' : 'none';\\n\",\n              \"\\n\",\n              \"        async function convertToInteractive(key) {\\n\",\n              \"          const element = document.querySelector('#df-62abe4e0-7cd3-4f1e-84eb-93cae45f41a3');\\n\",\n              \"          const dataTable =\\n\",\n              \"            await google.colab.kernel.invokeFunction('convertToInteractive',\\n\",\n              \"                                                     [key], {});\\n\",\n              \"          if (!dataTable) return;\\n\",\n              \"\\n\",\n              \"          const docLinkHtml = 'Like what you see? 
Visit the ' +\\n\",\n              \"            '<a target=\\\"_blank\\\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\\n\",\n              \"            + ' to learn more about interactive tables.';\\n\",\n              \"          element.innerHTML = '';\\n\",\n              \"          dataTable['output_type'] = 'display_data';\\n\",\n              \"          await google.colab.output.renderOutput(dataTable, element);\\n\",\n              \"          const docLink = document.createElement('div');\\n\",\n              \"          docLink.innerHTML = docLinkHtml;\\n\",\n              \"          element.appendChild(docLink);\\n\",\n              \"        }\\n\",\n              \"      </script>\\n\",\n              \"    </div>\\n\",\n              \"  </div>\\n\",\n              \"  \"\n            ]\n          },\n          \"metadata\": {},\n          \"execution_count\": 7\n        }\n      ]\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# Define alleles for each sample\\n\",\n        \"alleles={\\n\",\n        \"    \\\"my-sample\\\": [\\\"A0201\\\", \\\"A0301\\\", \\\"B0702\\\", \\\"C0802\\\"],\\n\",\n        \"}\"\n      ],\n      \"metadata\": {\n        \"id\": \"zwPHH09RcgCt\"\n      },\n      \"execution_count\": 8,\n      \"outputs\": []\n    },\n    {\n      \"cell_type\": \"code\",\n      \"execution_count\": 9,\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\",\n          \"height\": 559\n        },\n        \"id\": \"tYRpxn5YRGBk\",\n        \"outputId\": \"7b07a064-fcdf-4fec-d05a-242fdda2a6ad\"\n      },\n      \"outputs\": [\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"Predicting processing.\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stderr\",\n          \"text\": [\n            \"100%|██████████| 1/1 [00:13<00:00, 13.82s/it]\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"Predicting affinities.\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stderr\",\n          \"text\": [\n            \"100%|██████████| 4/4 [00:06<00:00,  1.62s/it]\\n\"\n          ]\n        },\n        {\n          \"output_type\": \"execute_result\",\n          \"data\": {\n            \"text/plain\": [\n              \"                      sequence_name  pos     peptide n_flank c_flank  \\\\\\n\",\n              \"0    tr|A0A6G7S6S0|A0A6G7S6S0_SARS2  138   LLYDANYFL   RSKNP   CWHTN   \\n\",\n              \"1    tr|A0A6G7S6S0|A0A6G7S6S0_SARS2  106   YLYALVYFL   EAPFL   QSINF   \\n\",\n              \"2    tr|A0A6G7S6S0|A0A6G7S6S0_SARS2   71   ALSKGVHFV   KRWQL   CNLLL   \\n\",\n              \"3    tr|A0A6B9WFC7|A0A6B9WFC7_SARS2   49   SLVKPSFYV   NIVNV   YSRVK   \\n\",\n              \"4    tr|A0A6B9WFC7|A0A6B9WFC7_SARS2   19   FLAFVVFLL   NSVLL   VTLAI   \\n\",\n              \"..                              ...  ...         ...     ...     ...   
\\n\",\n              \"188  tr|A0A6B9WFC7|A0A6B9WFC7_SARS2   15  SVLLFLAFVV   TLIVN   FLLVT   \\n\",\n              \"189  tr|A0A6G7S6S0|A0A6G7S6S0_SARS2   57    SASKIITL   LAVFQ   KKRWQ   \\n\",\n              \"190  tr|A0A6G7S6S0|A0A6G7S6S0_SARS2  169  TSGDGTTSPI   SSIVI   SEHDY   \\n\",\n              \"191  tr|A0A6B9VLF3|A0A6B9VLF3_SARS2   71   RINWITGGI   LAAVY   AIAMA   \\n\",\n              \"192  tr|A0A6G7S6S0|A0A6G7S6S0_SARS2    1   DLFMRIFTI       M   GTVTL   \\n\",\n              \"\\n\",\n              \"    sample_name    affinity best_allele  affinity_percentile  \\\\\\n\",\n              \"0     my-sample   10.659104       A0201             0.003625   \\n\",\n              \"1     my-sample   11.053785       A0201             0.006750   \\n\",\n              \"2     my-sample   11.501204       A0201             0.011500   \\n\",\n              \"3     my-sample   11.930823       A0201             0.013500   \\n\",\n              \"4     my-sample   12.318483       A0201             0.015875   \\n\",\n              \"..          ...         ...         ...                  ...   \\n\",\n              \"188   my-sample  466.913027       A0201             1.297625   \\n\",\n              \"189   my-sample  471.300226       C0802             0.774375   \\n\",\n              \"190   my-sample  473.865753       C0802             0.774375   \\n\",\n              \"191   my-sample  475.852826       A0201             1.306500   \\n\",\n              \"192   my-sample  498.907589       A0201             1.329875   \\n\",\n              \"\\n\",\n              \"     processing_score  presentation_score  presentation_percentile  \\n\",\n              \"0            0.157175            0.921852                 0.088804  \\n\",\n              \"1            0.014756            0.868851                 0.171848  \\n\",\n              \"2            0.676803            0.987502                 0.002065  \\n\",\n              \"3            0.091771            0.891807                 0.135353  \\n\",\n              \"4            0.007210            0.852791                 0.196277  \\n\",\n              \"..                ...                 ...                      ...  
\\n\",\n              \"188          0.010083            0.145307                 2.271005  \\n\",\n              \"189          0.772850            0.753873                 0.351359  \\n\",\n              \"190          0.000247            0.138992                 2.345462  \\n\",\n              \"191          0.166255            0.232094                 1.656413  \\n\",\n              \"192          0.972156            0.860781                 0.184022  \\n\",\n              \"\\n\",\n              \"[193 rows x 12 columns]\"\n            ],\n            \"text/html\": [\n              \"\\n\",\n              \"  <div id=\\\"df-5f2fb820-3c51-4916-ad31-6354fba87d6a\\\">\\n\",\n              \"    <div class=\\\"colab-df-container\\\">\\n\",\n              \"      <div>\\n\",\n              \"<style scoped>\\n\",\n              \"    .dataframe tbody tr th:only-of-type {\\n\",\n              \"        vertical-align: middle;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .dataframe tbody tr th {\\n\",\n              \"        vertical-align: top;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .dataframe thead th {\\n\",\n              \"        text-align: right;\\n\",\n              \"    }\\n\",\n              \"</style>\\n\",\n              \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n              \"  <thead>\\n\",\n              \"    <tr style=\\\"text-align: right;\\\">\\n\",\n              \"      <th></th>\\n\",\n              \"      <th>sequence_name</th>\\n\",\n              \"      <th>pos</th>\\n\",\n              \"      <th>peptide</th>\\n\",\n              \"      <th>n_flank</th>\\n\",\n              \"      <th>c_flank</th>\\n\",\n              \"      <th>sample_name</th>\\n\",\n              \"      <th>affinity</th>\\n\",\n              \"      <th>best_allele</th>\\n\",\n              \"      <th>affinity_percentile</th>\\n\",\n              \"      <th>processing_score</th>\\n\",\n              \"      <th>presentation_score</th>\\n\",\n              \"      <th>presentation_percentile</th>\\n\",\n              \"    </tr>\\n\",\n              \"  </thead>\\n\",\n              \"  <tbody>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>0</th>\\n\",\n              \"      <td>tr|A0A6G7S6S0|A0A6G7S6S0_SARS2</td>\\n\",\n              \"      <td>138</td>\\n\",\n              \"      <td>LLYDANYFL</td>\\n\",\n              \"      <td>RSKNP</td>\\n\",\n              \"      <td>CWHTN</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>10.659104</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>0.003625</td>\\n\",\n              \"      <td>0.157175</td>\\n\",\n              \"      <td>0.921852</td>\\n\",\n              \"      <td>0.088804</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>1</th>\\n\",\n              \"      <td>tr|A0A6G7S6S0|A0A6G7S6S0_SARS2</td>\\n\",\n              \"      <td>106</td>\\n\",\n              \"      <td>YLYALVYFL</td>\\n\",\n              \"      <td>EAPFL</td>\\n\",\n              \"      <td>QSINF</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>11.053785</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>0.006750</td>\\n\",\n              \"      <td>0.014756</td>\\n\",\n              \"      <td>0.868851</td>\\n\",\n              \"      
<td>0.171848</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>2</th>\\n\",\n              \"      <td>tr|A0A6G7S6S0|A0A6G7S6S0_SARS2</td>\\n\",\n              \"      <td>71</td>\\n\",\n              \"      <td>ALSKGVHFV</td>\\n\",\n              \"      <td>KRWQL</td>\\n\",\n              \"      <td>CNLLL</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>11.501204</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>0.011500</td>\\n\",\n              \"      <td>0.676803</td>\\n\",\n              \"      <td>0.987502</td>\\n\",\n              \"      <td>0.002065</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>3</th>\\n\",\n              \"      <td>tr|A0A6B9WFC7|A0A6B9WFC7_SARS2</td>\\n\",\n              \"      <td>49</td>\\n\",\n              \"      <td>SLVKPSFYV</td>\\n\",\n              \"      <td>NIVNV</td>\\n\",\n              \"      <td>YSRVK</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>11.930823</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>0.013500</td>\\n\",\n              \"      <td>0.091771</td>\\n\",\n              \"      <td>0.891807</td>\\n\",\n              \"      <td>0.135353</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>4</th>\\n\",\n              \"      <td>tr|A0A6B9WFC7|A0A6B9WFC7_SARS2</td>\\n\",\n              \"      <td>19</td>\\n\",\n              \"      <td>FLAFVVFLL</td>\\n\",\n              \"      <td>NSVLL</td>\\n\",\n              \"      <td>VTLAI</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>12.318483</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>0.015875</td>\\n\",\n              \"      <td>0.007210</td>\\n\",\n              \"      <td>0.852791</td>\\n\",\n              \"      <td>0.196277</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>...</th>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"      <td>...</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>188</th>\\n\",\n              \"      <td>tr|A0A6B9WFC7|A0A6B9WFC7_SARS2</td>\\n\",\n              \"      <td>15</td>\\n\",\n              \"      <td>SVLLFLAFVV</td>\\n\",\n              \"      <td>TLIVN</td>\\n\",\n              \"      <td>FLLVT</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>466.913027</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>1.297625</td>\\n\",\n              \"      <td>0.010083</td>\\n\",\n              \"      <td>0.145307</td>\\n\",\n              \"      <td>2.271005</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>189</th>\\n\",\n              \"      <td>tr|A0A6G7S6S0|A0A6G7S6S0_SARS2</td>\\n\",\n              \"      
<td>57</td>\\n\",\n              \"      <td>SASKIITL</td>\\n\",\n              \"      <td>LAVFQ</td>\\n\",\n              \"      <td>KKRWQ</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>471.300226</td>\\n\",\n              \"      <td>C0802</td>\\n\",\n              \"      <td>0.774375</td>\\n\",\n              \"      <td>0.772850</td>\\n\",\n              \"      <td>0.753873</td>\\n\",\n              \"      <td>0.351359</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>190</th>\\n\",\n              \"      <td>tr|A0A6G7S6S0|A0A6G7S6S0_SARS2</td>\\n\",\n              \"      <td>169</td>\\n\",\n              \"      <td>TSGDGTTSPI</td>\\n\",\n              \"      <td>SSIVI</td>\\n\",\n              \"      <td>SEHDY</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>473.865753</td>\\n\",\n              \"      <td>C0802</td>\\n\",\n              \"      <td>0.774375</td>\\n\",\n              \"      <td>0.000247</td>\\n\",\n              \"      <td>0.138992</td>\\n\",\n              \"      <td>2.345462</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>191</th>\\n\",\n              \"      <td>tr|A0A6B9VLF3|A0A6B9VLF3_SARS2</td>\\n\",\n              \"      <td>71</td>\\n\",\n              \"      <td>RINWITGGI</td>\\n\",\n              \"      <td>LAAVY</td>\\n\",\n              \"      <td>AIAMA</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>475.852826</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>1.306500</td>\\n\",\n              \"      <td>0.166255</td>\\n\",\n              \"      <td>0.232094</td>\\n\",\n              \"      <td>1.656413</td>\\n\",\n              \"    </tr>\\n\",\n              \"    <tr>\\n\",\n              \"      <th>192</th>\\n\",\n              \"      <td>tr|A0A6G7S6S0|A0A6G7S6S0_SARS2</td>\\n\",\n              \"      <td>1</td>\\n\",\n              \"      <td>DLFMRIFTI</td>\\n\",\n              \"      <td>M</td>\\n\",\n              \"      <td>GTVTL</td>\\n\",\n              \"      <td>my-sample</td>\\n\",\n              \"      <td>498.907589</td>\\n\",\n              \"      <td>A0201</td>\\n\",\n              \"      <td>1.329875</td>\\n\",\n              \"      <td>0.972156</td>\\n\",\n              \"      <td>0.860781</td>\\n\",\n              \"      <td>0.184022</td>\\n\",\n              \"    </tr>\\n\",\n              \"  </tbody>\\n\",\n              \"</table>\\n\",\n              \"<p>193 rows × 12 columns</p>\\n\",\n              \"</div>\\n\",\n              \"      <button class=\\\"colab-df-convert\\\" onclick=\\\"convertToInteractive('df-5f2fb820-3c51-4916-ad31-6354fba87d6a')\\\"\\n\",\n              \"              title=\\\"Convert this dataframe to an interactive table.\\\"\\n\",\n              \"              style=\\\"display:none;\\\">\\n\",\n              \"        \\n\",\n              \"  <svg xmlns=\\\"http://www.w3.org/2000/svg\\\" height=\\\"24px\\\"viewBox=\\\"0 0 24 24\\\"\\n\",\n              \"       width=\\\"24px\\\">\\n\",\n              \"    <path d=\\\"M0 0h24v24H0V0z\\\" fill=\\\"none\\\"/>\\n\",\n              \"    <path d=\\\"M18.56 5.44l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94zm-11 1L8.5 8.5l.94-2.06 2.06-.94-2.06-.94L8.5 2.5l-.94 2.06-2.06.94zm10 10l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94z\\\"/><path 
d=\\\"M17.41 7.96l-1.37-1.37c-.4-.4-.92-.59-1.43-.59-.52 0-1.04.2-1.43.59L10.3 9.45l-7.72 7.72c-.78.78-.78 2.05 0 2.83L4 21.41c.39.39.9.59 1.41.59.51 0 1.02-.2 1.41-.59l7.78-7.78 2.81-2.81c.8-.78.8-2.07 0-2.86zM5.41 20L4 18.59l7.72-7.72 1.47 1.35L5.41 20z\\\"/>\\n\",\n              \"  </svg>\\n\",\n              \"      </button>\\n\",\n              \"      \\n\",\n              \"  <style>\\n\",\n              \"    .colab-df-container {\\n\",\n              \"      display:flex;\\n\",\n              \"      flex-wrap:wrap;\\n\",\n              \"      gap: 12px;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .colab-df-convert {\\n\",\n              \"      background-color: #E8F0FE;\\n\",\n              \"      border: none;\\n\",\n              \"      border-radius: 50%;\\n\",\n              \"      cursor: pointer;\\n\",\n              \"      display: none;\\n\",\n              \"      fill: #1967D2;\\n\",\n              \"      height: 32px;\\n\",\n              \"      padding: 0 0 0 0;\\n\",\n              \"      width: 32px;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    .colab-df-convert:hover {\\n\",\n              \"      background-color: #E2EBFA;\\n\",\n              \"      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\\n\",\n              \"      fill: #174EA6;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    [theme=dark] .colab-df-convert {\\n\",\n              \"      background-color: #3B4455;\\n\",\n              \"      fill: #D2E3FC;\\n\",\n              \"    }\\n\",\n              \"\\n\",\n              \"    [theme=dark] .colab-df-convert:hover {\\n\",\n              \"      background-color: #434B5C;\\n\",\n              \"      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\\n\",\n              \"      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\\n\",\n              \"      fill: #FFFFFF;\\n\",\n              \"    }\\n\",\n              \"  </style>\\n\",\n              \"\\n\",\n              \"      <script>\\n\",\n              \"        const buttonEl =\\n\",\n              \"          document.querySelector('#df-5f2fb820-3c51-4916-ad31-6354fba87d6a button.colab-df-convert');\\n\",\n              \"        buttonEl.style.display =\\n\",\n              \"          google.colab.kernel.accessAllowed ? 'block' : 'none';\\n\",\n              \"\\n\",\n              \"        async function convertToInteractive(key) {\\n\",\n              \"          const element = document.querySelector('#df-5f2fb820-3c51-4916-ad31-6354fba87d6a');\\n\",\n              \"          const dataTable =\\n\",\n              \"            await google.colab.kernel.invokeFunction('convertToInteractive',\\n\",\n              \"                                                     [key], {});\\n\",\n              \"          if (!dataTable) return;\\n\",\n              \"\\n\",\n              \"          const docLinkHtml = 'Like what you see? 
Visit the ' +\\n\",\n              \"            '<a target=\\\"_blank\\\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\\n\",\n              \"            + ' to learn more about interactive tables.';\\n\",\n              \"          element.innerHTML = '';\\n\",\n              \"          dataTable['output_type'] = 'display_data';\\n\",\n              \"          await google.colab.output.renderOutput(dataTable, element);\\n\",\n              \"          const docLink = document.createElement('div');\\n\",\n              \"          docLink.innerHTML = docLinkHtml;\\n\",\n              \"          element.appendChild(docLink);\\n\",\n              \"        }\\n\",\n              \"      </script>\\n\",\n              \"    </div>\\n\",\n              \"  </div>\\n\",\n              \"  \"\n            ]\n          },\n          \"metadata\": {},\n          \"execution_count\": 9\n        }\n      ],\n      \"source\": [\n        \"# Predict across protein sequences and return peptides with predicted affinity\\n\",\n        \"# less than 500 nM.\\n\",\n        \"results2 = predictor.predict_sequences(\\n\",\n        \"    sequences=proteins.sequence.to_dict(),\\n\",\n        \"    alleles=alleles,\\n\",\n        \"    result=\\\"filtered\\\",\\n\",\n        \"    comparison_quantity=\\\"affinity\\\",\\n\",\n        \"    filter_value=500)\\n\",\n        \"results2\"\n      ]\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# Download results\\n\",\n        \"results2.to_csv('mhcflurry-results.csv')\\n\",\n        \"files.download('mhcflurry-results.csv')\"\n      ],\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\",\n          \"height\": 17\n        },\n        \"id\": \"CASrg0X5dgon\",\n        \"outputId\": \"8d333984-00cc-44d2-f5f4-6308fd7348a0\"\n      },\n      \"execution_count\": 10,\n      \"outputs\": [\n        {\n          \"output_type\": \"display_data\",\n          \"data\": {\n            \"text/plain\": [\n              \"<IPython.core.display.Javascript object>\"\n            ],\n            \"application/javascript\": [\n              \"\\n\",\n              \"    async function download(id, filename, size) {\\n\",\n              \"      if (!google.colab.kernel.accessAllowed) {\\n\",\n              \"        return;\\n\",\n              \"      }\\n\",\n              \"      const div = document.createElement('div');\\n\",\n              \"      const label = document.createElement('label');\\n\",\n              \"      label.textContent = `Downloading \\\"${filename}\\\": `;\\n\",\n              \"      div.appendChild(label);\\n\",\n              \"      const progress = document.createElement('progress');\\n\",\n              \"      progress.max = size;\\n\",\n              \"      div.appendChild(progress);\\n\",\n              \"      document.body.appendChild(div);\\n\",\n              \"\\n\",\n              \"      const buffers = [];\\n\",\n              \"      let downloaded = 0;\\n\",\n              \"\\n\",\n              \"      const channel = await google.colab.kernel.comms.open(id);\\n\",\n              \"      // Send a message to notify the kernel that we're ready.\\n\",\n              \"      channel.send({})\\n\",\n              \"\\n\",\n              \"      for await (const message of channel.messages) {\\n\",\n              \"        // Send a message to notify the kernel that we're ready.\\n\",\n              \"        
channel.send({})\\n\",\n              \"        if (message.buffers) {\\n\",\n              \"          for (const buffer of message.buffers) {\\n\",\n              \"            buffers.push(buffer);\\n\",\n              \"            downloaded += buffer.byteLength;\\n\",\n              \"            progress.value = downloaded;\\n\",\n              \"          }\\n\",\n              \"        }\\n\",\n              \"      }\\n\",\n              \"      const blob = new Blob(buffers, {type: 'application/binary'});\\n\",\n              \"      const a = document.createElement('a');\\n\",\n              \"      a.href = window.URL.createObjectURL(blob);\\n\",\n              \"      a.download = filename;\\n\",\n              \"      div.appendChild(a);\\n\",\n              \"      a.click();\\n\",\n              \"      div.remove();\\n\",\n              \"    }\\n\",\n              \"  \"\n            ]\n          },\n          \"metadata\": {}\n        },\n        {\n          \"output_type\": \"display_data\",\n          \"data\": {\n            \"text/plain\": [\n              \"<IPython.core.display.Javascript object>\"\n            ],\n            \"application/javascript\": [\n              \"download(\\\"download_b220af07-7880-4f63-93dd-8780f762a0a9\\\", \\\"mhcflurry-results.csv\\\", 32592)\"\n            ]\n          },\n          \"metadata\": {}\n        }\n      ]\n    },\n    {\n      \"cell_type\": \"code\",\n      \"source\": [\n        \"# See help for more options:\\n\",\n        \"help(predictor.predict_sequences)\"\n      ],\n      \"metadata\": {\n        \"colab\": {\n          \"base_uri\": \"https://localhost:8080/\"\n        },\n        \"id\": \"RtbpRsdTdMRL\",\n        \"outputId\": \"dda97aff-cdb7-4d8c-d950-df13a7aecbd6\"\n      },\n      \"execution_count\": 11,\n      \"outputs\": [\n        {\n          \"output_type\": \"stream\",\n          \"name\": \"stdout\",\n          \"text\": [\n            \"Help on method predict_sequences in module mhcflurry.class1_presentation_predictor:\\n\",\n            \"\\n\",\n            \"predict_sequences(sequences, alleles, result='best', comparison_quantity=None, filter_value=None, peptide_lengths=(8, 9, 10, 11), use_flanks=True, include_affinity_percentile=True, verbose=1, throw=True) method of mhcflurry.class1_presentation_predictor.Class1PresentationPredictor instance\\n\",\n            \"    Predict presentation across protein sequences.\\n\",\n            \"    \\n\",\n            \"    Example:\\n\",\n            \"    \\n\",\n            \"    >>> predictor = Class1PresentationPredictor.load()\\n\",\n            \"    >>> predictor.predict_sequences(\\n\",\n            \"    ...    sequences={\\n\",\n            \"    ...        'protein1': \\\"MDSKGSSQKGSRLLLLLVVSNLL\\\",\\n\",\n            \"    ...        'protein2': \\\"SSLPTPEDKEQAQQTHH\\\",\\n\",\n            \"    ...    },\\n\",\n            \"    ...    alleles={\\n\",\n            \"    ...        \\\"sample1\\\": [\\\"A0201\\\", \\\"A0301\\\", \\\"B0702\\\"],\\n\",\n            \"    ...        \\\"sample2\\\": [\\\"A0101\\\", \\\"C0202\\\"],\\n\",\n            \"    ...    },\\n\",\n            \"    ...    result=\\\"filtered\\\",\\n\",\n            \"    ...    comparison_quantity=\\\"affinity\\\",\\n\",\n            \"    ...    filter_value=500,\\n\",\n            \"    ...    
verbose=0)\\n\",\n            \"      sequence_name  pos     peptide n_flank c_flank sample_name  affinity best_allele  affinity_percentile  processing_score  presentation_score  presentation_percentile\\n\",\n            \"    0      protein1   14   LLLVVSNLL   GSRLL             sample1    57.180       A0201                0.398             0.233               0.754                    0.351\\n\",\n            \"    1      protein1   13   LLLLVVSNL   KGSRL       L     sample1    57.339       A0201                0.398             0.031               0.586                    0.643\\n\",\n            \"    2      protein1    5   SSQKGSRLL   MDSKG   LLLVV     sample2   110.779       C0202                0.782             0.061               0.456                    0.920\\n\",\n            \"    3      protein1    6   SQKGSRLLL   DSKGS   LLVVS     sample2   254.480       C0202                1.735             0.102               0.303                    1.356\\n\",\n            \"    4      protein1   13  LLLLVVSNLL   KGSRL             sample1   260.390       A0201                1.012             0.158               0.345                    1.215\\n\",\n            \"    5      protein1   12  LLLLLVVSNL   QKGSR       L     sample1   308.150       A0201                1.094             0.015               0.206                    1.802\\n\",\n            \"    6      protein2    0   SSLPTPEDK           EQAQQ     sample2   410.354       C0202                2.398             0.003               0.158                    2.155\\n\",\n            \"    7      protein1    5    SSQKGSRL   MDSKG   LLLLV     sample2   444.321       C0202                2.512             0.026               0.159                    2.138\\n\",\n            \"    8      protein2    0   SSLPTPEDK           EQAQQ     sample1   459.296       A0301                0.971             0.003               0.144                    2.292\\n\",\n            \"    9      protein1    4   GSSQKGSRL    MDSK   LLLLV     sample2   469.052       C0202                2.595             0.014               0.146                    2.261\\n\",\n            \"    \\n\",\n            \"    Parameters\\n\",\n            \"    ----------\\n\",\n            \"    sequences : str, list of string, or string -> string dict\\n\",\n            \"        Protein sequences. If a dict is given, the keys are arbitrary (\\n\",\n            \"        e.g. protein names), and the values are the amino acid sequences.\\n\",\n            \"    alleles : list of string, list of list of string, or dict of string -> list of string\\n\",\n            \"        MHC I alleles. 
Can be: (1) a string (a single allele), (2) a list of\\n\",\n            \"        strings (a single genotype), (3) a list of list of strings\\n\",\n            \"        (multiple genotypes, where the total number of genotypes must equal\\n\",\n            \"        the number of sequences), or (4) a dict giving multiple genotypes,\\n\",\n            \"        which will each be run over the sequences.\\n\",\n            \"    result : string\\n\",\n            \"        Specify 'best' to return the strongest peptide for each sequence,\\n\",\n            \"        'all' to return predictions for all peptides, or 'filtered' to\\n\",\n            \"        return predictions where the comparison_quantity is stronger\\n\",\n            \"        (i.e. (<) for affinity, (>) for scores) than filter_value.\\n\",\n            \"    comparison_quantity : string\\n\",\n            \"        One of \\\"presentation_score\\\", \\\"processing_score\\\", \\\"affinity\\\", or\\n\",\n            \"        \\\"affinity_percentile\\\". Prediction to use to rank (if result is\\n\",\n            \"        \\\"best\\\") or filter (if result is \\\"filtered\\\") results. Default is\\n\",\n            \"        \\\"presentation_score\\\".\\n\",\n            \"    filter_value : float\\n\",\n            \"        Threshold value to use, only relevant when result is \\\"filtered\\\".\\n\",\n            \"        If comparison_quantity is \\\"affinity\\\", then all results less than\\n\",\n            \"        (i.e. tighter than) the specified nM affinity are retained. If it's\\n\",\n            \"        \\\"presentation_score\\\" or \\\"processing_score\\\" then results greater than\\n\",\n            \"        the indicated filter_value are retained.\\n\",\n            \"    peptide_lengths : list of int\\n\",\n            \"        Peptide lengths to predict for.\\n\",\n            \"    use_flanks : bool\\n\",\n            \"        Whether to include flanking sequences when running the AP predictor\\n\",\n            \"        (for better cleavage prediction).\\n\",\n            \"    include_affinity_percentile : bool\\n\",\n            \"        Whether to include affinity percentile ranks in output.\\n\",\n            \"    verbose : int\\n\",\n            \"        Set to 0 for quiet mode.\\n\",\n            \"    throw : boolean\\n\",\n            \"        Whether to throw exceptions (vs. log warnings) on invalid inputs.\\n\",\n            \"    \\n\",\n            \"    Returns\\n\",\n            \"    -------\\n\",\n            \"    pandas.DataFrame with columns:\\n\",\n            \"        peptide, n_flank, c_flank, sequence_name, affinity, best_allele,\\n\",\n            \"        processing_score, presentation_score\\n\",\n            \"\\n\"\n          ]\n        }\n      ]\n    }\n  ],\n  \"metadata\": {\n    \"kernelspec\": {\n      \"display_name\": \"Python 3\",\n      \"language\": \"python\",\n      \"name\": \"python3\"\n    },\n    \"language_info\": {\n      \"codemirror_mode\": {\n        \"name\": \"ipython\",\n        \"version\": 3\n      },\n      \"file_extension\": \".py\",\n      \"mimetype\": \"text/x-python\",\n      \"name\": \"python\",\n      \"nbconvert_exporter\": \"python\",\n      \"pygments_lexer\": \"ipython3\",\n      \"version\": \"3.6.1\"\n    },\n    \"colab\": {\n      \"name\": \"mhcflurry-colab.ipynb\",\n      \"provenance\": []\n    }\n  },\n  \"nbformat\": 4,\n  \"nbformat_minor\": 0\n}\n"
  },
  {
    "path": "pylintrc",
    "content": "[TYPECHECK]\n# Without ignoring this, we get errors like:\n# E:249,20: Module 'numpy' has no 'nan' member (no-member)\nignored-modules = numpy\n"
  },
  {
    "path": "readthedocs.yml",
    "content": "conda:\n    file: docs/environment.yml\n"
  },
  {
    "path": "requirements.txt",
    "content": "numpy>=1.22.4\npandas>=2.0\ntorch>=2.0.0\nappdirs\nscikit-learn\nmhcgnomes>=3.0.1\npyyaml\ntqdm\n"
  },
  {
    "path": "scripts/compare_tf_pytorch_random_outputs.py",
    "content": "\"\"\"\nLarge-scale TF vs PyTorch MHCflurry comparison on random peptide/allele examples.\n\nThis script is designed to run locally and keep TF/PyTorch imports isolated by\nrunning each backend in a subprocess.\n\nPrimary workflow:\n    python scripts/compare_tf_pytorch_random_outputs.py run \\\n      --tf-repo-root /tmp/mhcflurry-master-check \\\n      --num-examples 120000 \\\n      --out-dir /tmp/mhcflurry-random-parity\n\nOutputs:\n    - dataset.csv.gz\n    - pt_predictions.csv.gz\n    - tf_predictions.csv.gz\n    - diff_summary.json\n    - diff_report.txt\n    - top_outliers.csv.gz\n\"\"\"\n\nfrom __future__ import annotations\n\nimport argparse\nimport json\nimport re\nimport subprocess\nimport sys\nimport warnings\nfrom datetime import datetime, timezone\nfrom pathlib import Path\n\nimport numpy as np\nimport pandas as pd\n\nwarnings.filterwarnings(\n    \"ignore\",\n    message=\"Downcasting behavior in `replace` is deprecated.*\",\n    category=FutureWarning,\n)\n\n\nAA20 = \"ACDEFGHIKLMNPQRSTVWY\"\nDEFAULT_ALLELE_REGEX = r\"^HLA-[ABC]\\*\"\n\nBASE_COLUMNS = [\"row_id\", \"peptide\", \"allele\", \"n_flank\", \"c_flank\"]\nSTRING_OUTPUT_COLUMNS = [\"pres_with_best_allele\", \"pres_without_best_allele\"]\n\n# A compact default panel: common human class I alleles often used for broad\n# population coverage checks, plus a few representative animal alleles.\nIEDB_POPCOV_HUMAN_ALLELES = [\n    \"HLA-A*01:01\",\n    \"HLA-A*02:01\",\n    \"HLA-A*03:01\",\n    \"HLA-A*24:02\",\n    \"HLA-A*26:01\",\n    \"HLA-A*30:01\",\n    \"HLA-A*30:02\",\n    \"HLA-A*31:01\",\n    \"HLA-A*33:01\",\n    \"HLA-A*68:01\",\n    \"HLA-B*07:02\",\n    \"HLA-B*08:01\",\n    \"HLA-B*15:01\",\n    \"HLA-B*35:01\",\n    \"HLA-B*40:01\",\n    \"HLA-B*44:02\",\n    \"HLA-B*44:03\",\n    \"HLA-B*51:01\",\n    \"HLA-B*53:01\",\n    \"HLA-B*57:01\",\n    \"HLA-B*58:01\",\n    \"HLA-C*03:04\",\n    \"HLA-C*04:01\",\n    \"HLA-C*05:01\",\n    \"HLA-C*06:02\",\n    \"HLA-C*07:01\",\n    \"HLA-C*07:02\",\n    \"HLA-C*08:02\",\n    \"HLA-C*12:03\",\n    \"HLA-C*15:02\",\n]\n\nEXTRA_ANIMAL_ALLELES = [\n    \"H2-K*b\",\n    \"H2-D*b\",\n    \"H2-K*d\",\n    \"H2-L*d\",\n    \"DLA-88*01:01\",\n    \"SLA-1*04:01\",\n]\n\n\ndef _json_default(value):\n    if isinstance(value, np.generic):\n        return value.item()\n    if isinstance(value, np.ndarray):\n        return value.tolist()\n    raise TypeError(\"Object of type %s is not JSON serializable\" % type(value).__name__)\n\n\ndef _self_cmd(*args: str) -> list[str]:\n    return [sys.executable, str(Path(__file__).resolve()), *args]\n\n\ndef _run_subprocess_json(cmd: list[str]) -> dict:\n    completed = subprocess.run(cmd, check=True, capture_output=True, text=True)\n    stdout = completed.stdout.strip()\n    if not stdout:\n        raise RuntimeError(\"No JSON output from command: %s\" % \" \".join(cmd))\n    line = stdout.splitlines()[-1]\n    return json.loads(line)\n\n\ndef _run_subprocess(cmd: list[str]) -> None:\n    subprocess.run(cmd, check=True)\n\n\ndef _append_if_set(cmd: list[str], flag: str, value: str | None) -> None:\n    if value:\n        cmd.extend([flag, value])\n\n\ndef _repo_root_default() -> Path:\n    return Path(__file__).resolve().parents[1]\n\n\ndef _random_sequences(rng: np.random.Generator, lengths: np.ndarray) -> list[str]:\n    chars = np.array(list(AA20), dtype=\"<U1\")\n    result = []\n    for length in lengths.tolist():\n        if length <= 0:\n            result.append(\"\")\n        else:\n            
result.append(\"\".join(rng.choice(chars, size=length)))\n    return result\n\n\ndef _generate_dataset(\n    num_examples: int,\n    alleles: list[str],\n    peptide_min_len: int,\n    peptide_max_len: int,\n    n_flank_max: int,\n    c_flank_max: int,\n    seed: int,\n) -> pd.DataFrame:\n    if not alleles:\n        raise ValueError(\"No alleles provided after filtering/intersection.\")\n\n    rng = np.random.default_rng(seed)\n    peptide_lengths = rng.integers(\n        peptide_min_len, peptide_max_len + 1, size=num_examples\n    )\n    n_flank_lengths = rng.integers(0, n_flank_max + 1, size=num_examples)\n    c_flank_lengths = rng.integers(0, c_flank_max + 1, size=num_examples)\n    allele_indices = rng.integers(0, len(alleles), size=num_examples)\n\n    df = pd.DataFrame(\n        {\n            \"row_id\": np.arange(num_examples, dtype=np.int64),\n            \"allele\": [alleles[i] for i in allele_indices.tolist()],\n            \"peptide\": _random_sequences(rng, peptide_lengths),\n            \"n_flank\": _random_sequences(rng, n_flank_lengths),\n            \"c_flank\": _random_sequences(rng, c_flank_lengths),\n        }\n    )\n    return df\n\n\ndef _apply_allele_panel(\n    allele_pool: list[str],\n    allele_panel: str,\n    allele_regex: str | None,\n) -> list[str]:\n    selected = list(allele_pool)\n    if allele_panel == \"iedb_plus_animals\":\n        requested = IEDB_POPCOV_HUMAN_ALLELES + EXTRA_ANIMAL_ALLELES\n        selected = [a for a in requested if a in selected]\n    elif allele_panel == \"all_hla\":\n        pattern = re.compile(DEFAULT_ALLELE_REGEX)\n        selected = [a for a in selected if pattern.search(a)]\n    elif allele_panel == \"all\":\n        pass\n    else:\n        raise ValueError(\"Unknown allele panel: %s\" % allele_panel)\n\n    if allele_regex:\n        pattern = re.compile(allele_regex)\n        selected = [a for a in selected if pattern.search(a)]\n    return selected\n\n\ndef cmd_backend_metadata(args: argparse.Namespace) -> None:\n    sys.path.insert(0, str(Path(args.repo_root).resolve()))\n\n    from mhcflurry import (  # pylint: disable=import-error\n        Class1AffinityPredictor,\n        Class1PresentationPredictor,\n    )\n    from mhcflurry.downloads import (  # pylint: disable=import-error\n        get_default_class1_models_dir,\n        get_default_class1_presentation_models_dir,\n        get_default_class1_processing_models_dir,\n        get_path,\n    )\n\n    class1_models_dir = args.class1_models_dir or get_default_class1_models_dir()\n    presentation_models_dir = (\n        args.presentation_models_dir or get_default_class1_presentation_models_dir()\n    )\n    processing_with_flanks_dir = (\n        args.processing_with_flanks_models_dir\n        or get_default_class1_processing_models_dir()\n    )\n    processing_without_flanks_dir = (\n        args.processing_without_flanks_models_dir\n        or get_path(\"models_class1_processing\", \"models.selected.no_flank\")\n    )\n\n    affinity_predictor = Class1AffinityPredictor.load(class1_models_dir)\n    presentation_predictor = Class1PresentationPredictor.load(presentation_models_dir)\n\n    with_flanks_lengths = (\n        presentation_predictor.processing_predictor_with_flanks.sequence_lengths\n        if presentation_predictor.processing_predictor_with_flanks is not None\n        else None\n    )\n    without_flanks_lengths = (\n        presentation_predictor.processing_predictor_without_flanks.sequence_lengths\n        if 
presentation_predictor.processing_predictor_without_flanks is not None\n        else None\n    )\n\n    out = {\n        \"repo_root\": str(Path(args.repo_root).resolve()),\n        \"class1_models_dir\": class1_models_dir,\n        \"presentation_models_dir\": presentation_models_dir,\n        \"processing_with_flanks_models_dir\": processing_with_flanks_dir,\n        \"processing_without_flanks_models_dir\": processing_without_flanks_dir,\n        \"supported_alleles\": sorted(affinity_predictor.supported_alleles),\n        \"alleles_with_percentile_ranks\": sorted(\n            affinity_predictor.allele_to_percent_rank_transform.keys()\n        ),\n        \"affinity_supported_peptide_lengths\": list(\n            affinity_predictor.supported_peptide_lengths\n        ),\n        \"with_flanks_lengths\": with_flanks_lengths,\n        \"without_flanks_lengths\": without_flanks_lengths,\n        \"provenance\": {\n            \"affinity\": affinity_predictor.provenance_string,\n            \"presentation\": presentation_predictor.provenance_string,\n            \"presentation_internal_affinity\": (\n                presentation_predictor.affinity_predictor.provenance_string\n            ),\n        },\n    }\n    print(json.dumps(out, default=_json_default))\n\n\ndef cmd_predict_backend(args: argparse.Namespace) -> None:\n    sys.path.insert(0, str(Path(args.repo_root).resolve()))\n    # Keep TF/Keras logs from overwhelming stdout in large runs.\n    try:\n        import logging\n\n        logging.getLogger(\"tensorflow\").setLevel(logging.ERROR)\n        from tf_keras.models import Model as _KerasModel  # type: ignore\n\n        _orig_predict = _KerasModel.predict\n\n        def _quiet_predict(self, *p_args, **p_kwargs):\n            p_kwargs.setdefault(\"verbose\", 0)\n            return _orig_predict(self, *p_args, **p_kwargs)\n\n        _KerasModel.predict = _quiet_predict\n    except Exception:\n        pass\n\n    from mhcflurry import (  # pylint: disable=import-error\n        Class1AffinityPredictor,\n        Class1PresentationPredictor,\n    )\n\n    df = pd.read_csv(args.input_csv, keep_default_na=False)\n    if not BASE_COLUMNS or not all(col in df.columns for col in BASE_COLUMNS):\n        raise ValueError(\"Input CSV missing required columns: %s\" % BASE_COLUMNS)\n\n    peptides = df[\"peptide\"].tolist()\n    alleles = df[\"allele\"].tolist()\n    n_flanks = df[\"n_flank\"].tolist()\n    c_flanks = df[\"c_flank\"].tolist()\n\n    affinity_predictor = Class1AffinityPredictor.load(args.class1_models_dir)\n    presentation_predictor = Class1PresentationPredictor.load(\n        args.presentation_models_dir\n    )\n\n    aff_df = affinity_predictor.predict_to_dataframe(\n        peptides=peptides,\n        alleles=alleles,\n        throw=False,\n        include_percentile_ranks=True,\n        include_confidence_intervals=True,\n        centrality_measure=args.centrality_measure,\n        model_kwargs={\"batch_size\": args.batch_size},\n    )\n\n    sample_names = alleles\n    alleles_map = {allele: [allele] for allele in sorted(set(alleles))}\n\n    pres_with_df = presentation_predictor.predict(\n        peptides=peptides,\n        alleles=alleles_map,\n        sample_names=sample_names,\n        n_flanks=n_flanks,\n        c_flanks=c_flanks,\n        include_affinity_percentile=True,\n        verbose=0,\n        throw=True,\n    ).sort_values(\"peptide_num\")\n\n    pres_without_df = presentation_predictor.predict(\n        peptides=peptides,\n        alleles=alleles_map,\n       
 sample_names=sample_names,\n        n_flanks=None,\n        c_flanks=None,\n        include_affinity_percentile=True,\n        verbose=0,\n        throw=True,\n    ).sort_values(\"peptide_num\")\n\n    out = df[BASE_COLUMNS].copy()\n\n    out[\"affinity_prediction\"] = aff_df[\"prediction\"].values\n    out[\"affinity_prediction_low\"] = aff_df.get(\"prediction_low\", np.nan)\n    out[\"affinity_prediction_high\"] = aff_df.get(\"prediction_high\", np.nan)\n    out[\"affinity_prediction_percentile\"] = aff_df.get(\"prediction_percentile\", np.nan)\n\n    out[\"pres_with_affinity\"] = pres_with_df[\"affinity\"].values\n    out[\"pres_with_best_allele\"] = pres_with_df[\"best_allele\"].astype(str).values\n    out[\"pres_with_affinity_percentile\"] = pres_with_df[\"affinity_percentile\"].values\n    out[\"processing_with_score\"] = pres_with_df[\"processing_score\"].values\n    out[\"pres_with_processing_score\"] = pres_with_df[\"processing_score\"].values\n    out[\"pres_with_presentation_score\"] = pres_with_df[\"presentation_score\"].values\n    out[\"pres_with_presentation_percentile\"] = pres_with_df[\n        \"presentation_percentile\"\n    ].values\n\n    out[\"pres_without_affinity\"] = pres_without_df[\"affinity\"].values\n    out[\"pres_without_best_allele\"] = pres_without_df[\"best_allele\"].astype(str).values\n    out[\"pres_without_affinity_percentile\"] = pres_without_df[\n        \"affinity_percentile\"\n    ].values\n    out[\"processing_without_score\"] = pres_without_df[\"processing_score\"].values\n    out[\"pres_without_processing_score\"] = pres_without_df[\"processing_score\"].values\n    out[\"pres_without_presentation_score\"] = pres_without_df[\n        \"presentation_score\"\n    ].values\n    out[\"pres_without_presentation_percentile\"] = pres_without_df[\n        \"presentation_percentile\"\n    ].values\n\n    Path(args.output_csv).parent.mkdir(parents=True, exist_ok=True)\n    out.to_csv(args.output_csv, index=False, compression=\"gzip\")\n\n\ndef _numeric_stats(\n    merged: pd.DataFrame,\n    column: str,\n    relative_epsilon: float,\n    top_k: int,\n) -> tuple[dict, list[dict]]:\n    pt_col = pd.to_numeric(merged[f\"{column}_pt\"], errors=\"coerce\")\n    tf_col = pd.to_numeric(merged[f\"{column}_tf\"], errors=\"coerce\")\n\n    valid_mask = np.isfinite(pt_col.values) & np.isfinite(tf_col.values)\n    valid_count = int(valid_mask.sum())\n    if valid_count == 0:\n        return {\"count\": 0}, []\n\n    pt_values = pt_col.values[valid_mask].astype(np.float64)\n    tf_values = tf_col.values[valid_mask].astype(np.float64)\n    diff = pt_values - tf_values\n    abs_diff = np.abs(diff)\n    rel_diff = abs_diff / np.maximum(np.abs(tf_values), relative_epsilon)\n\n    pearson = float(np.corrcoef(pt_values, tf_values)[0, 1]) if valid_count > 1 else np.nan\n\n    stats = {\n        \"count\": valid_count,\n        \"pt_mean\": float(pt_values.mean()),\n        \"tf_mean\": float(tf_values.mean()),\n        \"mean_diff\": float(diff.mean()),\n        \"mean_abs_diff\": float(abs_diff.mean()),\n        \"median_abs_diff\": float(np.median(abs_diff)),\n        \"p95_abs_diff\": float(np.percentile(abs_diff, 95)),\n        \"p99_abs_diff\": float(np.percentile(abs_diff, 99)),\n        \"max_abs_diff\": float(abs_diff.max()),\n        \"mean_rel_diff\": float(rel_diff.mean()),\n        \"p95_rel_diff\": float(np.percentile(rel_diff, 95)),\n        \"p99_rel_diff\": float(np.percentile(rel_diff, 99)),\n        \"max_rel_diff\": float(rel_diff.max()),\n        
\"pearson_r\": pearson,\n    }\n\n    valid_idx = np.where(valid_mask)[0]\n    sorted_local = np.argsort(abs_diff)[::-1][:top_k]\n    outliers = []\n    for rank, local_idx in enumerate(sorted_local, start=1):\n        global_idx = valid_idx[local_idx]\n        row = merged.iloc[global_idx]\n        outliers.append(\n            {\n                \"column\": column,\n                \"rank\": rank,\n                \"row_id\": int(row[\"row_id\"]),\n                \"peptide\": row[\"peptide\"],\n                \"allele\": row[\"allele\"],\n                \"n_flank\": row[\"n_flank\"],\n                \"c_flank\": row[\"c_flank\"],\n                \"pt_value\": float(pt_values[local_idx]),\n                \"tf_value\": float(tf_values[local_idx]),\n                \"signed_diff\": float(diff[local_idx]),\n                \"abs_diff\": float(abs_diff[local_idx]),\n                \"rel_diff\": float(rel_diff[local_idx]),\n            }\n        )\n    return stats, outliers\n\n\ndef cmd_analyze(args: argparse.Namespace) -> None:\n    pt = pd.read_csv(args.pt_predictions_csv, keep_default_na=False)\n    tf = pd.read_csv(args.tf_predictions_csv, keep_default_na=False)\n\n    merged = pt.merge(\n        tf,\n        on=BASE_COLUMNS,\n        suffixes=(\"_pt\", \"_tf\"),\n        how=\"inner\",\n    )\n\n    numeric_columns = [\n        col for col in pt.columns if col not in BASE_COLUMNS + STRING_OUTPUT_COLUMNS\n    ]\n    string_columns = [col for col in STRING_OUTPUT_COLUMNS if col in pt.columns]\n\n    summary = {\n        \"generated_at_utc\": datetime.now(timezone.utc).isoformat(),\n        \"num_rows_pt\": int(len(pt)),\n        \"num_rows_tf\": int(len(tf)),\n        \"num_rows_merged\": int(len(merged)),\n        \"numeric_columns\": {},\n        \"string_columns\": {},\n    }\n    outlier_rows = []\n\n    for col in numeric_columns:\n        stats, outliers = _numeric_stats(\n            merged, col, relative_epsilon=args.relative_epsilon, top_k=args.top_k\n        )\n        summary[\"numeric_columns\"][col] = stats\n        outlier_rows.extend(outliers)\n\n    for col in string_columns:\n        pt_values = merged[f\"{col}_pt\"].astype(str)\n        tf_values = merged[f\"{col}_tf\"].astype(str)\n        mismatches = pt_values != tf_values\n        summary[\"string_columns\"][col] = {\n            \"count\": int(len(merged)),\n            \"mismatch_count\": int(mismatches.sum()),\n            \"mismatch_rate\": float(mismatches.mean()),\n        }\n\n    Path(args.summary_json).parent.mkdir(parents=True, exist_ok=True)\n    with open(args.summary_json, \"w\") as out:\n        json.dump(summary, out, indent=2, sort_keys=True)\n\n    if outlier_rows:\n        outlier_df = pd.DataFrame(outlier_rows)\n        outlier_df.to_csv(args.top_outliers_csv, index=False, compression=\"gzip\")\n    else:\n        pd.DataFrame().to_csv(args.top_outliers_csv, index=False, compression=\"gzip\")\n\n    report_lines = []\n    report_lines.append(\"TF vs PyTorch random comparison report\")\n    report_lines.append(\"generated_at_utc: %s\" % summary[\"generated_at_utc\"])\n    report_lines.append(\"rows_pt: %d\" % summary[\"num_rows_pt\"])\n    report_lines.append(\"rows_tf: %d\" % summary[\"num_rows_tf\"])\n    report_lines.append(\"rows_merged: %d\" % summary[\"num_rows_merged\"])\n    report_lines.append(\"\")\n    report_lines.append(\"Numeric columns:\")\n    for col, stats in summary[\"numeric_columns\"].items():\n        if stats.get(\"count\", 0) == 0:\n            report_lines.append(\"  
%s: no valid numeric pairs\" % col)\n            continue\n        report_lines.append(\n            (\n                \"  %s: mean_abs=%.6g p95_abs=%.6g p99_abs=%.6g max_abs=%.6g \"\n                \"mean_rel=%.6g p99_rel=%.6g max_rel=%.6g r=%.8f\"\n            )\n            % (\n                col,\n                stats[\"mean_abs_diff\"],\n                stats[\"p95_abs_diff\"],\n                stats[\"p99_abs_diff\"],\n                stats[\"max_abs_diff\"],\n                stats[\"mean_rel_diff\"],\n                stats[\"p99_rel_diff\"],\n                stats[\"max_rel_diff\"],\n                stats[\"pearson_r\"],\n            )\n        )\n    report_lines.append(\"\")\n    report_lines.append(\"String columns:\")\n    for col, stats in summary[\"string_columns\"].items():\n        report_lines.append(\n            \"  %s: mismatch_rate=%.6g (%d/%d)\"\n            % (\n                col,\n                stats[\"mismatch_rate\"],\n                stats[\"mismatch_count\"],\n                stats[\"count\"],\n            )\n        )\n\n    Path(args.report_txt).parent.mkdir(parents=True, exist_ok=True)\n    with open(args.report_txt, \"w\") as out:\n        out.write(\"\\n\".join(report_lines) + \"\\n\")\n\n\ndef cmd_run(args: argparse.Namespace) -> None:\n    out_dir = Path(args.out_dir).resolve()\n    out_dir.mkdir(parents=True, exist_ok=True)\n\n    pt_repo_root = str(Path(args.pytorch_repo_root).resolve())\n    tf_repo_root = str(Path(args.tf_repo_root).resolve())\n\n    pt_meta_cmd = _self_cmd(\"backend-metadata\", \"--repo-root\", pt_repo_root)\n    tf_meta_cmd = _self_cmd(\"backend-metadata\", \"--repo-root\", tf_repo_root)\n    for cmd in (pt_meta_cmd, tf_meta_cmd):\n        _append_if_set(cmd, \"--class1-models-dir\", args.class1_models_dir)\n        _append_if_set(cmd, \"--presentation-models-dir\", args.presentation_models_dir)\n        _append_if_set(\n            cmd,\n            \"--processing-with-flanks-models-dir\",\n            args.processing_with_flanks_models_dir,\n        )\n        _append_if_set(\n            cmd,\n            \"--processing-without-flanks-models-dir\",\n            args.processing_without_flanks_models_dir,\n        )\n\n    pt_meta = _run_subprocess_json(pt_meta_cmd)\n    tf_meta = _run_subprocess_json(tf_meta_cmd)\n\n    class1_models_dir = args.class1_models_dir or pt_meta[\"class1_models_dir\"]\n    presentation_models_dir = (\n        args.presentation_models_dir or pt_meta[\"presentation_models_dir\"]\n    )\n    processing_with_flanks_models_dir = (\n        args.processing_with_flanks_models_dir\n        or pt_meta[\"processing_with_flanks_models_dir\"]\n    )\n    processing_without_flanks_models_dir = (\n        args.processing_without_flanks_models_dir\n        or pt_meta[\"processing_without_flanks_models_dir\"]\n    )\n\n    allele_pool = sorted(\n        set(pt_meta[\"supported_alleles\"]).intersection(tf_meta[\"supported_alleles\"])\n    )\n    if not args.allow_missing_affinity_percentiles:\n        pt_percentile = set(pt_meta[\"alleles_with_percentile_ranks\"])\n        tf_percentile = set(tf_meta[\"alleles_with_percentile_ranks\"])\n        allele_pool = [a for a in allele_pool if (a in pt_percentile and a in tf_percentile)]\n\n    allele_pool = _apply_allele_panel(\n        allele_pool=allele_pool,\n        allele_panel=args.allele_panel,\n        allele_regex=args.allele_regex,\n    )\n    if not allele_pool:\n        raise ValueError(\n            \"No alleles remain after applying panel '%s' and regex 
filter.\"\n            % args.allele_panel\n        )\n\n    if args.allele_subset_size and len(allele_pool) > args.allele_subset_size:\n        rng = np.random.default_rng(args.seed)\n        indices = rng.choice(\n            len(allele_pool), size=args.allele_subset_size, replace=False\n        )\n        allele_pool = sorted([allele_pool[i] for i in indices.tolist()])\n\n    print(\n        \"Using %d alleles (panel=%s).\" % (len(allele_pool), args.allele_panel)\n    )\n\n    pt_min_len, pt_max_len = pt_meta[\"affinity_supported_peptide_lengths\"]\n    tf_min_len, tf_max_len = tf_meta[\"affinity_supported_peptide_lengths\"]\n    peptide_min_len = max(int(pt_min_len), int(tf_min_len))\n    peptide_max_len = min(int(pt_max_len), int(tf_max_len))\n\n    pt_with = pt_meta[\"with_flanks_lengths\"]\n    tf_with = tf_meta[\"with_flanks_lengths\"]\n    n_flank_max = min(int(pt_with[\"n_flank\"]), int(tf_with[\"n_flank\"]))\n    c_flank_max = min(int(pt_with[\"c_flank\"]), int(tf_with[\"c_flank\"]))\n\n    dataset_path = out_dir / \"dataset.csv.gz\"\n    pt_predictions_path = out_dir / \"pt_predictions.csv.gz\"\n    tf_predictions_path = out_dir / \"tf_predictions.csv.gz\"\n    summary_path = out_dir / \"diff_summary.json\"\n    report_path = out_dir / \"diff_report.txt\"\n    outliers_path = out_dir / \"top_outliers.csv.gz\"\n\n    dataset = _generate_dataset(\n        num_examples=args.num_examples,\n        alleles=allele_pool,\n        peptide_min_len=peptide_min_len,\n        peptide_max_len=peptide_max_len,\n        n_flank_max=n_flank_max,\n        c_flank_max=c_flank_max,\n        seed=args.seed,\n    )\n    dataset.to_csv(dataset_path, index=False, compression=\"gzip\")\n\n    pt_predict_cmd = _self_cmd(\n        \"predict-backend\",\n        \"--repo-root\",\n        pt_repo_root,\n        \"--input-csv\",\n        str(dataset_path),\n        \"--output-csv\",\n        str(pt_predictions_path),\n        \"--batch-size\",\n        str(args.batch_size),\n        \"--centrality-measure\",\n        args.centrality_measure,\n    )\n    tf_predict_cmd = _self_cmd(\n        \"predict-backend\",\n        \"--repo-root\",\n        tf_repo_root,\n        \"--input-csv\",\n        str(dataset_path),\n        \"--output-csv\",\n        str(tf_predictions_path),\n        \"--batch-size\",\n        str(args.batch_size),\n        \"--centrality-measure\",\n        args.centrality_measure,\n    )\n    for cmd in (pt_predict_cmd, tf_predict_cmd):\n        _append_if_set(cmd, \"--class1-models-dir\", class1_models_dir)\n        _append_if_set(cmd, \"--presentation-models-dir\", presentation_models_dir)\n        _append_if_set(\n            cmd,\n            \"--processing-with-flanks-models-dir\",\n            processing_with_flanks_models_dir,\n        )\n        _append_if_set(\n            cmd,\n            \"--processing-without-flanks-models-dir\",\n            processing_without_flanks_models_dir,\n        )\n\n    _run_subprocess(pt_predict_cmd)\n    _run_subprocess(tf_predict_cmd)\n\n    _run_subprocess(\n        _self_cmd(\n            \"analyze\",\n            \"--pt-predictions-csv\",\n            str(pt_predictions_path),\n            \"--tf-predictions-csv\",\n            str(tf_predictions_path),\n            \"--summary-json\",\n            str(summary_path),\n            \"--report-txt\",\n            str(report_path),\n            \"--top-outliers-csv\",\n            str(outliers_path),\n            \"--top-k\",\n            str(args.top_k),\n            \"--relative-epsilon\",\n       
     str(args.relative_epsilon),\n        )\n    )\n\n    print(\"Wrote dataset:\", dataset_path)\n    print(\"Wrote PT predictions:\", pt_predictions_path)\n    print(\"Wrote TF predictions:\", tf_predictions_path)\n    print(\"Wrote summary:\", summary_path)\n    print(\"Wrote report:\", report_path)\n    print(\"Wrote outliers:\", outliers_path)\n\n\ndef _add_common_model_dir_args(parser: argparse.ArgumentParser) -> None:\n    parser.add_argument(\"--class1-models-dir\", default=None)\n    parser.add_argument(\"--presentation-models-dir\", default=None)\n    parser.add_argument(\"--processing-with-flanks-models-dir\", default=None)\n    parser.add_argument(\"--processing-without-flanks-models-dir\", default=None)\n\n\ndef build_parser() -> argparse.ArgumentParser:\n    parser = argparse.ArgumentParser()\n    subparsers = parser.add_subparsers(dest=\"command\", required=True)\n\n    run = subparsers.add_parser(\"run\")\n    run.add_argument(\"--tf-repo-root\", required=True)\n    run.add_argument(\"--pytorch-repo-root\", default=str(_repo_root_default()))\n    run.add_argument(\"--num-examples\", type=int, default=120000)\n    run.add_argument(\"--seed\", type=int, default=1)\n    run.add_argument(\n        \"--allele-panel\",\n        choices=[\"iedb_plus_animals\", \"all_hla\", \"all\"],\n        default=\"iedb_plus_animals\",\n        help=(\n            \"Preset allele list. 'iedb_plus_animals' uses ~30 common human alleles \"\n            \"plus a few animal alleles.\"\n        ),\n    )\n    run.add_argument(\n        \"--allele-regex\",\n        default=None,\n        help=\"Optional extra regex filter applied after allele panel selection.\",\n    )\n    run.add_argument(\n        \"--allele-subset-size\",\n        type=int,\n        default=None,\n        help=\"Optional random subset size after panel+regex filtering.\",\n    )\n    run.add_argument(\n        \"--allow-missing-affinity-percentiles\",\n        action=\"store_true\",\n        help=(\n            \"If set, allow random alleles with missing affinity percentile calibrations. 
\"\n            \"Default behavior restricts to alleles with percentiles in both backends.\"\n        ),\n    )\n    run.add_argument(\"--batch-size\", type=int, default=4096)\n    run.add_argument(\"--centrality-measure\", default=\"mean\")\n    run.add_argument(\"--out-dir\", default=\"/tmp/mhcflurry-random-parity\")\n    run.add_argument(\"--top-k\", type=int, default=25)\n    run.add_argument(\"--relative-epsilon\", type=float, default=1e-12)\n    _add_common_model_dir_args(run)\n    run.set_defaults(func=cmd_run)\n\n    metadata = subparsers.add_parser(\"backend-metadata\")\n    metadata.add_argument(\"--repo-root\", required=True)\n    _add_common_model_dir_args(metadata)\n    metadata.set_defaults(func=cmd_backend_metadata)\n\n    predict = subparsers.add_parser(\"predict-backend\")\n    predict.add_argument(\"--repo-root\", required=True)\n    predict.add_argument(\"--input-csv\", required=True)\n    predict.add_argument(\"--output-csv\", required=True)\n    predict.add_argument(\"--batch-size\", type=int, default=4096)\n    predict.add_argument(\"--centrality-measure\", default=\"mean\")\n    _add_common_model_dir_args(predict)\n    predict.set_defaults(func=cmd_predict_backend)\n\n    analyze = subparsers.add_parser(\"analyze\")\n    analyze.add_argument(\"--pt-predictions-csv\", required=True)\n    analyze.add_argument(\"--tf-predictions-csv\", required=True)\n    analyze.add_argument(\"--summary-json\", required=True)\n    analyze.add_argument(\"--report-txt\", required=True)\n    analyze.add_argument(\"--top-outliers-csv\", required=True)\n    analyze.add_argument(\"--top-k\", type=int, default=25)\n    analyze.add_argument(\"--relative-epsilon\", type=float, default=1e-12)\n    analyze.set_defaults(func=cmd_analyze)\n\n    return parser\n\n\ndef main() -> None:\n    parser = build_parser()\n    args = parser.parse_args()\n\n    # Normalize empty-string optional directory args from subprocess command args.\n    for key in [\n        \"class1_models_dir\",\n        \"presentation_models_dir\",\n        \"processing_with_flanks_models_dir\",\n        \"processing_without_flanks_models_dir\",\n    ]:\n        if hasattr(args, key) and getattr(args, key) == \"\":\n            setattr(args, key, None)\n\n    args.func(args)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "scripts/cross_allele_parity_analysis.py",
    "content": "\"\"\"\nCross-allele TF vs PyTorch parity analysis for MHCflurry.\n\nThis script:\n1. Selects a limited curated allele panel (default: common human + a few animal).\n2. Generates random peptides uniformly across lengths (default: 7-15 when supported).\n3. Generates random flanks for each peptide (unique N and C flanks across peptide entries).\n4. Builds a full cross-product dataset: peptides x alleles.\n5. Runs predictions for PyTorch branch and TF master via subprocess isolation.\n6. Produces summary stats, outlier tables, and diagnostic plots.\n\nExample:\n  python scripts/cross_allele_parity_analysis.py \\\n    --tf-repo-root /tmp/mhcflurry-master-check \\\n    --num-peptides 1000 \\\n    --out-dir /tmp/mhcflurry-cross-allele-parity\n\"\"\"\n\nfrom __future__ import annotations\n\nimport argparse\nimport json\nimport re\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nimport matplotlib\n\nmatplotlib.use(\"Agg\")\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\n\n\nAA20 = \"ACDEFGHIKLMNPQRSTVWY\"\nBASE_COLUMNS = [\"row_id\", \"peptide\", \"allele\", \"n_flank\", \"c_flank\"]\nSTRING_OUTPUT_COLUMNS = [\"pres_with_best_allele\", \"pres_without_best_allele\"]\n\n# Common human class I alleles often used for broad coverage checks.\nIEDB_POPCOV_HUMAN_ALLELES = [\n    \"HLA-A*01:01\",\n    \"HLA-A*02:01\",\n    \"HLA-A*03:01\",\n    \"HLA-A*24:02\",\n    \"HLA-A*26:01\",\n    \"HLA-A*30:01\",\n    \"HLA-A*30:02\",\n    \"HLA-A*31:01\",\n    \"HLA-A*33:01\",\n    \"HLA-A*68:01\",\n    \"HLA-B*07:02\",\n    \"HLA-B*08:01\",\n    \"HLA-B*15:01\",\n    \"HLA-B*35:01\",\n    \"HLA-B*40:01\",\n    \"HLA-B*44:02\",\n    \"HLA-B*44:03\",\n    \"HLA-B*51:01\",\n    \"HLA-B*53:01\",\n    \"HLA-B*57:01\",\n    \"HLA-B*58:01\",\n    \"HLA-C*03:04\",\n    \"HLA-C*04:01\",\n    \"HLA-C*05:01\",\n    \"HLA-C*06:02\",\n    \"HLA-C*07:01\",\n    \"HLA-C*07:02\",\n    \"HLA-C*08:02\",\n    \"HLA-C*12:03\",\n    \"HLA-C*15:02\",\n]\n\n# A few non-human alleles to ensure cross-species sanity.\nEXTRA_ANIMAL_ALLELES = [\n    \"H2-K*b\",\n    \"H2-D*b\",\n    \"H2-K*d\",\n    \"H2-L*d\",\n    \"DLA-88*01:01\",\n    \"SLA-1*04:01\",\n]\n\n\ndef _repo_root_default() -> Path:\n    return Path(__file__).resolve().parents[1]\n\n\ndef _compare_script_path() -> Path:\n    return Path(__file__).resolve().parent / \"compare_tf_pytorch_random_outputs.py\"\n\n\ndef _self_cmd(*args: str) -> list[str]:\n    return [sys.executable, str(_compare_script_path()), *args]\n\n\ndef _run_subprocess_json(cmd: list[str]) -> dict:\n    completed = subprocess.run(cmd, check=True, capture_output=True, text=True)\n    stdout = completed.stdout.strip()\n    if not stdout:\n        raise RuntimeError(\"No JSON output from command: %s\" % \" \".join(cmd))\n    return json.loads(stdout.splitlines()[-1])\n\n\ndef _run_subprocess(cmd: list[str]) -> None:\n    subprocess.run(cmd, check=True)\n\n\ndef _append_if_set(cmd: list[str], flag: str, value: str | None) -> None:\n    if value:\n        cmd.extend([flag, value])\n\n\ndef _select_alleles(\n    pt_meta: dict,\n    tf_meta: dict,\n    allele_panel: str,\n    allele_regex: str | None,\n    require_percentiles: bool,\n) -> list[str]:\n    allele_pool = sorted(\n        set(pt_meta[\"supported_alleles\"]).intersection(tf_meta[\"supported_alleles\"])\n    )\n    if require_percentiles:\n        pt_percentile = set(pt_meta[\"alleles_with_percentile_ranks\"])\n        tf_percentile = set(tf_meta[\"alleles_with_percentile_ranks\"])\n        
allele_pool = [a for a in allele_pool if a in pt_percentile and a in tf_percentile]\n\n    if allele_panel == \"iedb_plus_animals\":\n        requested = IEDB_POPCOV_HUMAN_ALLELES + EXTRA_ANIMAL_ALLELES\n        allele_pool = [a for a in requested if a in allele_pool]\n    elif allele_panel == \"all_hla\":\n        pattern = re.compile(r\"^HLA-[ABC]\\*\")\n        allele_pool = [a for a in allele_pool if pattern.search(a)]\n    elif allele_panel == \"all\":\n        pass\n    else:\n        raise ValueError(\"Unknown allele panel: %s\" % allele_panel)\n\n    if allele_regex:\n        pattern = re.compile(allele_regex)\n        allele_pool = [a for a in allele_pool if pattern.search(a)]\n\n    if not allele_pool:\n        raise ValueError(\"No alleles remain after panel/filter selection.\")\n    return allele_pool\n\n\ndef _lengths_to_sample(\n    min_supported: int, max_supported: int, min_requested: int, max_requested: int\n) -> list[int]:\n    start = max(min_supported, min_requested)\n    end = min(max_supported, max_requested)\n    if end < start:\n        raise ValueError(\n            \"No overlap in peptide lengths. Supported=[%d,%d], requested=[%d,%d]\"\n            % (min_supported, max_supported, min_requested, max_requested)\n        )\n    lengths = list(range(start, end + 1))\n    # If 7 is not supported but 8 is, satisfy the user's fallback request.\n    if 7 not in lengths and 8 in lengths and min_requested <= 7 <= max_requested:\n        return [length for length in lengths if length >= 8]\n    return lengths\n\n\ndef _random_sequences(\n    rng: np.random.Generator,\n    length: int,\n    n: int,\n    used: set[str],\n) -> list[str]:\n    chars = np.array(list(AA20), dtype=\"<U1\")\n    out: list[str] = []\n    while len(out) < n:\n        needed = n - len(out)\n        batch = max(needed * 2, 64)\n        arr = rng.choice(chars, size=(batch, length))\n        for row in arr:\n            seq = \"\".join(row.tolist())\n            if seq in used:\n                continue\n            used.add(seq)\n            out.append(seq)\n            if len(out) >= n:\n                break\n    return out\n\n\ndef _generate_uniform_peptides(\n    num_peptides: int,\n    lengths: list[int],\n    n_flank_length: int,\n    c_flank_length: int,\n    seed: int,\n) -> pd.DataFrame:\n    if num_peptides <= 0:\n        raise ValueError(\"num_peptides must be positive\")\n    if not lengths:\n        raise ValueError(\"No lengths provided\")\n    if n_flank_length <= 0 or c_flank_length <= 0:\n        raise ValueError(\n            \"Flank lengths must be positive (got n=%d c=%d).\"\n            % (n_flank_length, c_flank_length)\n        )\n\n    rng = np.random.default_rng(seed)\n    per_length = num_peptides // len(lengths)\n    remainder = num_peptides % len(lengths)\n\n    rows = []\n    used: set[str] = set()\n    used_n_flanks: set[str] = set()\n    used_c_flanks: set[str] = set()\n    n_flanks = _random_sequences(\n        rng=rng, length=n_flank_length, n=num_peptides, used=used_n_flanks\n    )\n    c_flanks = _random_sequences(\n        rng=rng, length=c_flank_length, n=num_peptides, used=used_c_flanks\n    )\n\n    peptide_id = 0\n    for i, length in enumerate(lengths):\n        count = per_length + (1 if i < remainder else 0)\n        seqs = _random_sequences(rng=rng, length=length, n=count, used=used)\n        for seq in seqs:\n            rows.append(\n                {\n                    \"peptide_id\": peptide_id,\n                    \"peptide\": seq,\n                    
\"peptide_length\": length,\n                    \"n_flank\": n_flanks[peptide_id],\n                    \"c_flank\": c_flanks[peptide_id],\n                }\n            )\n            peptide_id += 1\n    return pd.DataFrame(rows)\n\n\ndef _cross_join_dataset(peptides_df: pd.DataFrame, alleles: list[str]) -> pd.DataFrame:\n    n_peptides = len(peptides_df)\n    n_alleles = len(alleles)\n    total = n_peptides * n_alleles\n\n    peptide_vals = peptides_df[\"peptide\"].values\n    length_vals = peptides_df[\"peptide_length\"].values\n    peptide_id_vals = peptides_df[\"peptide_id\"].values\n    n_flank_vals = peptides_df[\"n_flank\"].values\n    c_flank_vals = peptides_df[\"c_flank\"].values\n\n    peptide_repeated = np.repeat(peptide_vals, n_alleles)\n    length_repeated = np.repeat(length_vals, n_alleles)\n    peptide_id_repeated = np.repeat(peptide_id_vals, n_alleles)\n    n_flank_repeated = np.repeat(n_flank_vals, n_alleles)\n    c_flank_repeated = np.repeat(c_flank_vals, n_alleles)\n    allele_tiled = np.tile(np.array(alleles), n_peptides)\n\n    out = pd.DataFrame(\n        {\n            \"row_id\": np.arange(total, dtype=np.int64),\n            \"peptide_id\": peptide_id_repeated,\n            \"peptide_length\": length_repeated,\n            \"peptide\": peptide_repeated,\n            \"allele\": allele_tiled,\n            \"n_flank\": n_flank_repeated,\n            \"c_flank\": c_flank_repeated,\n        }\n    )\n    return out\n\n\ndef _pre_run_sanity_checks(peptides_df: pd.DataFrame, dataset: pd.DataFrame) -> None:\n    for col in [\"peptide\", \"n_flank\", \"c_flank\"]:\n        if not peptides_df[col].is_unique:\n            dupes = int(peptides_df[col].duplicated().sum())\n            raise ValueError(\n                \"Pre-run sanity check failed: %s has %d repeated values \"\n                \"across peptide entries.\"\n                % (col, dupes)\n            )\n    if peptides_df[[\"peptide\", \"n_flank\", \"c_flank\"]].duplicated().any():\n        raise ValueError(\n            \"Pre-run sanity check failed: duplicate (peptide, n_flank, c_flank) tuples.\"\n        )\n    if dataset[[\"peptide\", \"allele\", \"n_flank\", \"c_flank\"]].duplicated().any():\n        raise ValueError(\n            \"Pre-run sanity check failed: duplicate (peptide, allele, n_flank, c_flank) rows.\"\n        )\n\n\ndef _enforce_presentation_score_requirements(\n    predictions: pd.DataFrame,\n    label: str,\n    min_fraction_above: float = 0.01,\n    threshold: float = 0.2,\n    min_max_score: float = 0.9,\n) -> dict:\n    stats = {}\n    for col in [\"pres_with_presentation_score\", \"pres_without_presentation_score\"]:\n        if col not in predictions.columns:\n            continue\n        scores = pd.to_numeric(predictions[col], errors=\"coerce\")\n        frac = float((scores > threshold).mean())\n        max_score = float(scores.max())\n        stats[col] = {\n            \"fraction_gt_threshold\": frac,\n            \"threshold\": threshold,\n            \"max_score\": max_score,\n            \"min_required_fraction\": min_fraction_above,\n            \"min_required_max_score\": min_max_score,\n        }\n        if frac < min_fraction_above:\n            raise ValueError(\n                \"%s failed presentation sanity: %s has %.6f fraction > %.3f, need >= %.3f\"\n                % (label, col, frac, threshold, min_fraction_above)\n            )\n        if max_score <= min_max_score:\n            raise ValueError(\n                \"%s failed presentation sanity: %s max 
%.6f, need > %.3f\"\n                % (label, col, max_score, min_max_score)\n            )\n    return stats\n\n\ndef _numeric_output_columns(df: pd.DataFrame) -> list[str]:\n    return [c for c in df.columns if c not in BASE_COLUMNS + STRING_OUTPUT_COLUMNS]\n\n\ndef _make_diff_frame(merged: pd.DataFrame, numeric_columns: list[str]) -> pd.DataFrame:\n    out = merged[BASE_COLUMNS].copy()\n    for col in numeric_columns:\n        pt = pd.to_numeric(merged[f\"{col}_pt\"], errors=\"coerce\")\n        tf = pd.to_numeric(merged[f\"{col}_tf\"], errors=\"coerce\")\n        out[f\"{col}_pt\"] = pt\n        out[f\"{col}_tf\"] = tf\n        out[f\"{col}_diff\"] = pt - tf\n        out[f\"{col}_abs_diff\"] = (pt - tf).abs()\n    return out\n\n\ndef _per_output_summary(diff_df: pd.DataFrame, numeric_columns: list[str]) -> pd.DataFrame:\n    rows = []\n    for col in numeric_columns:\n        abs_diff = pd.to_numeric(diff_df[f\"{col}_abs_diff\"], errors=\"coerce\").dropna()\n        if abs_diff.empty:\n            continue\n        rows.append(\n            {\n                \"output\": col,\n                \"count\": int(abs_diff.shape[0]),\n                \"mean_abs_diff\": float(abs_diff.mean()),\n                \"median_abs_diff\": float(abs_diff.median()),\n                \"p95_abs_diff\": float(abs_diff.quantile(0.95)),\n                \"p99_abs_diff\": float(abs_diff.quantile(0.99)),\n                \"max_abs_diff\": float(abs_diff.max()),\n            }\n        )\n    return pd.DataFrame(rows).sort_values(\"max_abs_diff\", ascending=False)\n\n\ndef _break_thresholds_for_output(output: str) -> float:\n    if output.startswith(\"affinity_prediction\") or output.endswith(\"_affinity\"):\n        return 0.1  # nM\n    if \"affinity_percentile\" in output or \"presentation_percentile\" in output:\n        return 0.1  # percentile points\n    return 1e-4  # score-scale outputs\n\n\ndef _break_analysis(\n    merged: pd.DataFrame,\n    diff_df: pd.DataFrame,\n    numeric_columns: list[str],\n) -> tuple[pd.DataFrame, pd.DataFrame]:\n    break_rows = []\n    max_abs_per_row = np.zeros(len(diff_df), dtype=np.float64)\n    max_output_per_row = np.array([\"\"] * len(diff_df), dtype=object)\n\n    for col in numeric_columns:\n        abs_col = pd.to_numeric(diff_df[f\"{col}_abs_diff\"], errors=\"coerce\").fillna(0.0)\n        abs_vals = abs_col.values\n        threshold = _break_thresholds_for_output(col)\n        break_mask = abs_vals > threshold\n        break_rows.append(\n            {\n                \"output\": col,\n                \"threshold_abs_diff\": threshold,\n                \"break_count\": int(break_mask.sum()),\n                \"break_rate\": float(break_mask.mean()),\n            }\n        )\n        improve_mask = abs_vals > max_abs_per_row\n        max_abs_per_row[improve_mask] = abs_vals[improve_mask]\n        max_output_per_row[improve_mask] = col\n\n    row_summary = merged[BASE_COLUMNS].copy()\n    row_summary[\"max_abs_diff_any_output\"] = max_abs_per_row\n    row_summary[\"worst_output\"] = max_output_per_row\n    return pd.DataFrame(break_rows), row_summary\n\n\ndef _plot_output_ranges(summary_df: pd.DataFrame, out_path: Path) -> None:\n    plot_df = summary_df.sort_values(\"max_abs_diff\", ascending=True)\n    y = np.arange(len(plot_df))\n\n    fig_h = max(4.5, 0.3 * len(plot_df))\n    plt.figure(figsize=(10, fig_h))\n    plt.hlines(y, 0, plot_df[\"max_abs_diff\"], color=\"#1f77b4\", linewidth=2.0, label=\"max\")\n    plt.hlines(\n        y,\n        0,\n        
plot_df[\"p99_abs_diff\"],\n        color=\"#ff7f0e\",\n        linewidth=2.0,\n        alpha=0.9,\n        label=\"p99\",\n    )\n    plt.hlines(\n        y,\n        0,\n        plot_df[\"p95_abs_diff\"],\n        color=\"#2ca02c\",\n        linewidth=2.0,\n        alpha=0.9,\n        label=\"p95\",\n    )\n    plt.plot(plot_df[\"mean_abs_diff\"], y, \"o\", color=\"#d62728\", label=\"mean\", markersize=4)\n    plt.yticks(y, plot_df[\"output\"])\n    plt.xscale(\"log\")\n    plt.xlabel(\"Absolute difference (log scale)\")\n    plt.ylabel(\"Output\")\n    plt.title(\"TF vs PyTorch absolute difference ranges by output\")\n    plt.legend(loc=\"lower right\")\n    plt.tight_layout()\n    plt.savefig(out_path, dpi=180)\n    plt.close()\n\n\ndef _plot_row_max_hist(row_summary: pd.DataFrame, out_path: Path) -> None:\n    vals = row_summary[\"max_abs_diff_any_output\"].values\n    bins = np.logspace(\n        np.log10(max(vals.min(), 1e-12)),\n        np.log10(max(vals.max(), 1e-12)) if vals.max() > 0 else -12,\n        60,\n    )\n    bins = np.unique(bins)\n    if bins.shape[0] < 2:\n        bins = np.array([1e-12, 1e-11])\n\n    plt.figure(figsize=(8, 4.5))\n    plt.hist(vals, bins=bins, color=\"#1f77b4\", alpha=0.85)\n    plt.xscale(\"log\")\n    plt.xlabel(\"Max absolute difference across outputs (per pMHC)\")\n    plt.ylabel(\"Count\")\n    plt.title(\"Per-row worst-case difference distribution\")\n    plt.tight_layout()\n    plt.savefig(out_path, dpi=180)\n    plt.close()\n\n\ndef _plot_length_breakdown(\n    diff_df: pd.DataFrame,\n    target_output: str,\n    out_path: Path,\n) -> None:\n    abs_col = f\"{target_output}_abs_diff\"\n    if abs_col not in diff_df.columns:\n        return\n\n    grouped = (\n        diff_df.groupby(diff_df[\"peptide\"].str.len())[abs_col]\n        .agg([\"mean\", \"median\", \"max\"])\n        .reset_index()\n        .rename(columns={\"peptide\": \"peptide_length\"})\n    )\n    grouped = grouped.sort_values(\"peptide_length\")\n\n    plt.figure(figsize=(8, 4.5))\n    plt.plot(grouped[\"peptide_length\"], grouped[\"mean\"], marker=\"o\", label=\"mean\")\n    plt.plot(grouped[\"peptide_length\"], grouped[\"median\"], marker=\"o\", label=\"median\")\n    plt.plot(grouped[\"peptide_length\"], grouped[\"max\"], marker=\"o\", label=\"max\")\n    plt.yscale(\"log\")\n    plt.xlabel(\"Peptide length\")\n    plt.ylabel(f\"{target_output} abs diff (log)\")\n    plt.title(f\"Difference vs peptide length: {target_output}\")\n    plt.legend()\n    plt.tight_layout()\n    plt.savefig(out_path, dpi=180)\n    plt.close()\n\n\ndef _safe_plot_name(name: str) -> str:\n    return re.sub(r\"[^A-Za-z0-9._-]+\", \"_\", name).strip(\"_\")\n\n\ndef _plot_per_output_hists(\n    diff_df: pd.DataFrame,\n    numeric_columns: list[str],\n    out_dir: Path,\n) -> None:\n    out_dir.mkdir(parents=True, exist_ok=True)\n    for output in numeric_columns:\n        abs_col = f\"{output}_abs_diff\"\n        if abs_col not in diff_df.columns:\n            continue\n        vals = pd.to_numeric(diff_df[abs_col], errors=\"coerce\").dropna().values.astype(float)\n        if vals.size == 0:\n            continue\n\n        positive = vals[vals > 0]\n        if positive.size > 0:\n            epsilon = positive.min() / 10.0\n        else:\n            epsilon = 1e-18\n        plot_vals = np.where(vals > 0, vals, epsilon)\n\n        lo = float(plot_vals.min())\n        hi = float(plot_vals.max())\n        if not np.isfinite(lo) or not np.isfinite(hi):\n            continue\n        if hi > lo:\n     
       bins = np.logspace(np.log10(lo), np.log10(hi), 60)\n            bins = np.unique(bins)\n            if bins.shape[0] < 2:\n                bins = np.array([lo, hi + lo * 1e-6], dtype=float)\n        else:\n            bins = np.array([lo * 0.9, hi * 1.1], dtype=float)\n            bins = np.where(bins <= 0, epsilon, bins)\n\n        plt.figure(figsize=(8, 4.5))\n        plt.hist(plot_vals, bins=bins, color=\"#1f77b4\", alpha=0.85)\n        plt.xscale(\"log\")\n        plt.xlabel(f\"{output} absolute difference\")\n        plt.ylabel(\"Count\")\n        plt.title(\n            f\"{output} abs diff histogram (zero_count={(vals == 0).sum()}, max={vals.max():.3g})\"\n        )\n        plt.tight_layout()\n        plot_name = f\"{_safe_plot_name(output)}.png\"\n        plt.savefig(out_dir / plot_name, dpi=180)\n        plt.close()\n\n\ndef _write_break_report(\n    out_path: Path,\n    allele_count: int,\n    peptide_count: int,\n    pmhc_count: int,\n    lengths: list[int],\n    break_df: pd.DataFrame,\n    row_summary: pd.DataFrame,\n    top_rows: pd.DataFrame,\n) -> None:\n    lines = []\n    lines.append(\"Cross-allele TF vs PyTorch break analysis\")\n    lines.append(f\"alleles: {allele_count}\")\n    lines.append(f\"peptides: {peptide_count}\")\n    lines.append(f\"pMHC rows: {pmhc_count}\")\n    lines.append(f\"peptide lengths sampled: {lengths}\")\n    lines.append(\"\")\n    lines.append(\"Thresholded break counts by output:\")\n    for _, row in break_df.sort_values(\"break_rate\", ascending=False).iterrows():\n        lines.append(\n            \"  {output}: threshold={thr:.3g}, break_count={cnt}, break_rate={rate:.6g}\".format(\n                output=row[\"output\"],\n                thr=row[\"threshold_abs_diff\"],\n                cnt=int(row[\"break_count\"]),\n                rate=float(row[\"break_rate\"]),\n            )\n        )\n    lines.append(\"\")\n    lines.append(\n        \"Per-row max abs diff summary: mean={:.6g}, p95={:.6g}, p99={:.6g}, max={:.6g}\".format(\n            float(row_summary[\"max_abs_diff_any_output\"].mean()),\n            float(row_summary[\"max_abs_diff_any_output\"].quantile(0.95)),\n            float(row_summary[\"max_abs_diff_any_output\"].quantile(0.99)),\n            float(row_summary[\"max_abs_diff_any_output\"].max()),\n        )\n    )\n    lines.append(\"\")\n    lines.append(\"Top worst rows:\")\n    for _, row in top_rows.iterrows():\n        lines.append(\n            \"  row_id={row_id} peptide={peptide} allele={allele} \"\n            \"worst_output={worst_output} max_abs_diff={max_abs_diff_any_output:.6g}\".format(\n                **row.to_dict()\n            )\n        )\n    out_path.write_text(\"\\n\".join(lines) + \"\\n\")\n\n\ndef build_parser() -> argparse.ArgumentParser:\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--tf-repo-root\", required=True)\n    parser.add_argument(\"--pytorch-repo-root\", default=str(_repo_root_default()))\n    parser.add_argument(\"--num-peptides\", type=int, default=1000)\n    parser.add_argument(\"--seed\", type=int, default=1)\n    parser.add_argument(\"--min-length\", type=int, default=7)\n    parser.add_argument(\"--max-length\", type=int, default=15)\n    parser.add_argument(\n        \"--allele-panel\",\n        choices=[\"iedb_plus_animals\", \"all_hla\", \"all\"],\n        default=\"iedb_plus_animals\",\n    )\n    parser.add_argument(\"--allele-regex\", default=None)\n    parser.add_argument(\n        \"--allow-missing-affinity-percentiles\",\n        
action=\"store_true\",\n        help=\"Allow alleles that lack calibrated affinity percentiles.\",\n    )\n    parser.add_argument(\"--batch-size\", type=int, default=4096)\n    parser.add_argument(\"--centrality-measure\", default=\"mean\")\n    parser.add_argument(\"--top-k\", type=int, default=50)\n    parser.add_argument(\"--out-dir\", default=\"/tmp/mhcflurry-cross-allele-parity\")\n    parser.add_argument(\"--class1-models-dir\", default=None)\n    parser.add_argument(\"--presentation-models-dir\", default=None)\n    parser.add_argument(\"--processing-with-flanks-models-dir\", default=None)\n    parser.add_argument(\"--processing-without-flanks-models-dir\", default=None)\n    return parser\n\n\ndef main() -> None:\n    args = build_parser().parse_args()\n\n    out_dir = Path(args.out_dir).resolve()\n    out_dir.mkdir(parents=True, exist_ok=True)\n    plots_dir = out_dir / \"plots\"\n    plots_dir.mkdir(parents=True, exist_ok=True)\n\n    pt_repo_root = str(Path(args.pytorch_repo_root).resolve())\n    tf_repo_root = str(Path(args.tf_repo_root).resolve())\n\n    pt_meta_cmd = _self_cmd(\"backend-metadata\", \"--repo-root\", pt_repo_root)\n    tf_meta_cmd = _self_cmd(\"backend-metadata\", \"--repo-root\", tf_repo_root)\n    for cmd in (pt_meta_cmd, tf_meta_cmd):\n        _append_if_set(cmd, \"--class1-models-dir\", args.class1_models_dir)\n        _append_if_set(cmd, \"--presentation-models-dir\", args.presentation_models_dir)\n        _append_if_set(\n            cmd,\n            \"--processing-with-flanks-models-dir\",\n            args.processing_with_flanks_models_dir,\n        )\n        _append_if_set(\n            cmd,\n            \"--processing-without-flanks-models-dir\",\n            args.processing_without_flanks_models_dir,\n        )\n\n    pt_meta = _run_subprocess_json(pt_meta_cmd)\n    tf_meta = _run_subprocess_json(tf_meta_cmd)\n\n    class1_models_dir = args.class1_models_dir or pt_meta[\"class1_models_dir\"]\n    presentation_models_dir = args.presentation_models_dir or pt_meta[\"presentation_models_dir\"]\n    processing_with_flanks_models_dir = (\n        args.processing_with_flanks_models_dir\n        or pt_meta[\"processing_with_flanks_models_dir\"]\n    )\n    processing_without_flanks_models_dir = (\n        args.processing_without_flanks_models_dir\n        or pt_meta[\"processing_without_flanks_models_dir\"]\n    )\n\n    alleles = _select_alleles(\n        pt_meta=pt_meta,\n        tf_meta=tf_meta,\n        allele_panel=args.allele_panel,\n        allele_regex=args.allele_regex,\n        require_percentiles=(not args.allow_missing_affinity_percentiles),\n    )\n\n    pt_min_len, pt_max_len = pt_meta[\"affinity_supported_peptide_lengths\"]\n    tf_min_len, tf_max_len = tf_meta[\"affinity_supported_peptide_lengths\"]\n    lengths = _lengths_to_sample(\n        min_supported=max(int(pt_min_len), int(tf_min_len)),\n        max_supported=min(int(pt_max_len), int(tf_max_len)),\n        min_requested=args.min_length,\n        max_requested=args.max_length,\n    )\n    pt_with = pt_meta[\"with_flanks_lengths\"]\n    tf_with = tf_meta[\"with_flanks_lengths\"]\n    if pt_with is None or tf_with is None:\n        raise ValueError(\"With-flanks processing models are required for this experiment.\")\n    n_flank_length = min(int(pt_with[\"n_flank\"]), int(tf_with[\"n_flank\"]))\n    c_flank_length = min(int(pt_with[\"c_flank\"]), int(tf_with[\"c_flank\"]))\n\n    peptides_df = _generate_uniform_peptides(\n        num_peptides=args.num_peptides,\n        
lengths=lengths,\n        n_flank_length=n_flank_length,\n        c_flank_length=c_flank_length,\n        seed=args.seed,\n    )\n    dataset = _cross_join_dataset(peptides_df=peptides_df, alleles=alleles)\n    _pre_run_sanity_checks(peptides_df=peptides_df, dataset=dataset)\n\n    dataset_path = out_dir / \"dataset.csv.gz\"\n    peptides_path = out_dir / \"peptides.csv.gz\"\n    alleles_path = out_dir / \"alleles.txt\"\n    dataset.to_csv(dataset_path, index=False, compression=\"gzip\")\n    peptides_df.to_csv(peptides_path, index=False, compression=\"gzip\")\n    alleles_path.write_text(\"\\n\".join(alleles) + \"\\n\")\n\n    pt_predictions_path = out_dir / \"pt_predictions.csv.gz\"\n    tf_predictions_path = out_dir / \"tf_predictions.csv.gz\"\n    summary_path = out_dir / \"diff_summary.json\"\n    report_path = out_dir / \"diff_report.txt\"\n    outliers_path = out_dir / \"top_outliers.csv.gz\"\n\n    pt_predict_cmd = _self_cmd(\n        \"predict-backend\",\n        \"--repo-root\",\n        pt_repo_root,\n        \"--input-csv\",\n        str(dataset_path),\n        \"--output-csv\",\n        str(pt_predictions_path),\n        \"--batch-size\",\n        str(args.batch_size),\n        \"--centrality-measure\",\n        args.centrality_measure,\n    )\n    tf_predict_cmd = _self_cmd(\n        \"predict-backend\",\n        \"--repo-root\",\n        tf_repo_root,\n        \"--input-csv\",\n        str(dataset_path),\n        \"--output-csv\",\n        str(tf_predictions_path),\n        \"--batch-size\",\n        str(args.batch_size),\n        \"--centrality-measure\",\n        args.centrality_measure,\n    )\n    for cmd in (pt_predict_cmd, tf_predict_cmd):\n        _append_if_set(cmd, \"--class1-models-dir\", class1_models_dir)\n        _append_if_set(cmd, \"--presentation-models-dir\", presentation_models_dir)\n        _append_if_set(\n            cmd,\n            \"--processing-with-flanks-models-dir\",\n            processing_with_flanks_models_dir,\n        )\n        _append_if_set(\n            cmd,\n            \"--processing-without-flanks-models-dir\",\n            processing_without_flanks_models_dir,\n        )\n\n    _run_subprocess(pt_predict_cmd)\n    _run_subprocess(tf_predict_cmd)\n\n    _run_subprocess(\n        _self_cmd(\n            \"analyze\",\n            \"--pt-predictions-csv\",\n            str(pt_predictions_path),\n            \"--tf-predictions-csv\",\n            str(tf_predictions_path),\n            \"--summary-json\",\n            str(summary_path),\n            \"--report-txt\",\n            str(report_path),\n            \"--top-outliers-csv\",\n            str(outliers_path),\n            \"--top-k\",\n            str(args.top_k),\n        )\n    )\n\n    pt = pd.read_csv(pt_predictions_path, keep_default_na=False)\n    tf = pd.read_csv(tf_predictions_path, keep_default_na=False)\n    pt_presentation_stats = _enforce_presentation_score_requirements(\n        predictions=pt,\n        label=\"PyTorch\",\n    )\n    tf_presentation_stats = _enforce_presentation_score_requirements(\n        predictions=tf,\n        label=\"TensorFlow\",\n    )\n    merged = pt.merge(tf, on=BASE_COLUMNS, suffixes=(\"_pt\", \"_tf\"), how=\"inner\")\n    numeric_columns = _numeric_output_columns(pt)\n    diff_df = _make_diff_frame(merged, numeric_columns=numeric_columns)\n    output_summary_df = _per_output_summary(diff_df, numeric_columns=numeric_columns)\n    break_df, row_summary = _break_analysis(\n        merged=merged, diff_df=diff_df, 
numeric_columns=numeric_columns\n    )\n\n    output_summary_path = out_dir / \"output_diff_summary.csv\"\n    break_summary_path = out_dir / \"break_summary.csv\"\n    row_summary_path = out_dir / \"row_worst_diff.csv.gz\"\n    top_rows_path = out_dir / \"top_rows_by_any_output.csv.gz\"\n    output_summary_df.to_csv(output_summary_path, index=False)\n    break_df.to_csv(break_summary_path, index=False)\n    row_summary.to_csv(row_summary_path, index=False, compression=\"gzip\")\n    top_rows = row_summary.sort_values(\"max_abs_diff_any_output\", ascending=False).head(args.top_k)\n    top_rows.to_csv(top_rows_path, index=False, compression=\"gzip\")\n\n    _plot_output_ranges(output_summary_df, plots_dir / \"output_abs_diff_ranges.png\")\n    _plot_row_max_hist(row_summary, plots_dir / \"row_max_abs_diff_hist.png\")\n    if \"pres_with_presentation_score\" in numeric_columns:\n        _plot_length_breakdown(\n            diff_df,\n            target_output=\"pres_with_presentation_score\",\n            out_path=plots_dir / \"length_breakdown_pres_with_presentation_score.png\",\n        )\n    _plot_per_output_hists(\n        diff_df=diff_df,\n        numeric_columns=numeric_columns,\n        out_dir=plots_dir / \"per_output_abs_diff_hist\",\n    )\n\n    _write_break_report(\n        out_path=out_dir / \"break_analysis.txt\",\n        allele_count=len(alleles),\n        peptide_count=len(peptides_df),\n        pmhc_count=len(dataset),\n        lengths=lengths,\n        break_df=break_df,\n        row_summary=row_summary,\n        top_rows=top_rows,\n    )\n\n    metadata = {\n        \"allele_count\": len(alleles),\n        \"peptide_count\": int(len(peptides_df)),\n        \"pmhc_count\": int(len(dataset)),\n        \"lengths\": lengths,\n        \"n_flank_length\": n_flank_length,\n        \"c_flank_length\": c_flank_length,\n        \"allele_panel\": args.allele_panel,\n        \"pytorch_repo_root\": pt_repo_root,\n        \"tf_repo_root\": tf_repo_root,\n        \"presentation_sanity\": {\n            \"pytorch\": pt_presentation_stats,\n            \"tensorflow\": tf_presentation_stats,\n        },\n    }\n    with open(out_dir / \"run_metadata.json\", \"w\") as out:\n        json.dump(metadata, out, indent=2, sort_keys=True)\n\n    print(\"Alleles:\", len(alleles))\n    print(\"Peptides:\", len(peptides_df))\n    print(\"pMHC rows:\", len(dataset))\n    print(\"Flank lengths:\", {\"n_flank\": n_flank_length, \"c_flank\": c_flank_length})\n    print(\"Presentation sanity (PyTorch):\", pt_presentation_stats)\n    print(\"Presentation sanity (TensorFlow):\", tf_presentation_stats)\n    print(\"Dataset:\", dataset_path)\n    print(\"PT predictions:\", pt_predictions_path)\n    print(\"TF predictions:\", tf_predictions_path)\n    print(\"Diff summary:\", summary_path)\n    print(\"Output diff summary:\", output_summary_path)\n    print(\"Break summary:\", break_summary_path)\n    print(\"Break analysis:\", out_dir / \"break_analysis.txt\")\n    print(\"Plots dir:\", plots_dir)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
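  {
    "path": "scripts/demo_load_presentation_fixture.py",
    "content": "\"\"\"\nIllustrative sketch (not wired into CI): re-checks the invariants that\n`extract_high_presentation_fixture.py` enforces when it writes the\npresentation fixture. The paths below assume the default fixture locations\nunder ``test/data``; adjust them if the fixture lives elsewhere.\n\nExample:\n  python scripts/demo_load_presentation_fixture.py\n\"\"\"\n\nfrom __future__ import annotations\n\nimport json\nfrom pathlib import Path\n\nimport pandas as pd\n\nDATA_DIR = Path(__file__).resolve().parents[1] / \"test\" / \"data\"\nFIXTURE_CSV = DATA_DIR / \"master_released_class1_presentation_highscore_rows.csv.gz\"\nFIXTURE_METADATA = DATA_DIR / \"master_released_class1_presentation_highscore_rows_metadata.json\"\nCONTEXT_COLUMNS = [\"peptide\", \"n_flank\", \"c_flank\"]\n\n\ndef main() -> None:\n    # keep_default_na=False matches how the fixture writer reads predictions,\n    # so empty flank strings stay strings instead of becoming NaN.\n    df = pd.read_csv(FIXTURE_CSV, keep_default_na=False)\n    metadata = json.loads(FIXTURE_METADATA.read_text())\n\n    # Every selected peptide+flank context keeps one row per allele.\n    alleles_per_context = df.groupby(CONTEXT_COLUMNS, observed=True)[\"allele\"].nunique()\n    assert (alleles_per_context == metadata[\"allele_count\"]).all()\n\n    # Each context was selected because some allele exceeded the high-score\n    # threshold on at least one presentation score column.\n    context_max = df.groupby(CONTEXT_COLUMNS, observed=True)[metadata[\"score_columns\"]].max()\n    assert (context_max.max(axis=1) > metadata[\"high_score_threshold\"]).all()\n\n    print(\"Fixture rows:\", len(df))\n    print(\"Contexts:\", int(metadata[\"context_count\"]))\n    print(\"Alleles per context:\", int(metadata[\"allele_count\"]))\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },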
  {
    "path": "scripts/extract_high_presentation_fixture.py",
    "content": "\"\"\"\nExtract high-presentation TF rows for release regression fixtures.\n\nGiven a TF predictions table from `cross_allele_parity_analysis.py` or\n`compare_tf_pytorch_random_outputs.py`, this script:\n1. Finds peptide+flank contexts where any allele has presentation score > threshold.\n2. Keeps all allele rows for those contexts (including low-scoring alleles).\n3. Writes a compact fixture CSV and metadata JSON for unit tests.\n\nExample:\n  python scripts/extract_high_presentation_fixture.py \\\n    --tf-predictions-csv /tmp/mhcflurry-cross-allele-1000-randflanks/tf_predictions.csv.gz \\\n    --out-csv test/data/master_released_class1_presentation_highscore_rows.csv.gz \\\n    --out-metadata-json test/data/master_released_class1_presentation_highscore_rows_metadata.json\n\"\"\"\n\nfrom __future__ import annotations\n\nimport argparse\nimport json\nfrom pathlib import Path\n\nimport pandas as pd\n\n\nCONTEXT_COLUMNS = [\"peptide\", \"n_flank\", \"c_flank\"]\nSCORE_COLUMNS = [\n    \"pres_with_presentation_score\",\n    \"pres_without_presentation_score\",\n]\n\n\ndef _collect_model_metadata() -> dict:\n    metadata = {}\n    try:\n        from mhcflurry import Class1PresentationPredictor  # pylint: disable=import-error\n        from mhcflurry.downloads import (  # pylint: disable=import-error\n            configure,\n            get_current_release,\n        )\n\n        configure()\n        predictor = Class1PresentationPredictor.load()\n        metadata.update(\n            {\n                \"release\": get_current_release(),\n                \"presentation_provenance\": predictor.provenance_string,\n                \"presentation_internal_affinity_provenance\": (\n                    predictor.affinity_predictor.provenance_string\n                ),\n            }\n        )\n    except Exception as exc:  # pragma: no cover - metadata capture is best-effort\n        metadata[\"model_metadata_error\"] = str(exc)\n    return metadata\n\n\ndef build_parser() -> argparse.ArgumentParser:\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--tf-predictions-csv\", required=True)\n    parser.add_argument(\"--out-csv\", required=True)\n    parser.add_argument(\"--out-metadata-json\", required=True)\n    parser.add_argument(\"--high-score-threshold\", type=float, default=0.9)\n    parser.add_argument(\"--low-score-threshold\", type=float, default=0.2)\n    parser.add_argument(\n        \"--allow-contexts-without-low-alleles\",\n        action=\"store_true\",\n        help=(\n            \"Do not enforce that each selected peptide+flank context has at least one \"\n            \"allele below --low-score-threshold.\"\n        ),\n    )\n    return parser\n\n\ndef main() -> None:\n    args = build_parser().parse_args()\n\n    tf_predictions_path = Path(args.tf_predictions_csv).resolve()\n    out_csv = Path(args.out_csv).resolve()\n    out_metadata = Path(args.out_metadata_json).resolve()\n    out_csv.parent.mkdir(parents=True, exist_ok=True)\n    out_metadata.parent.mkdir(parents=True, exist_ok=True)\n\n    df = pd.read_csv(tf_predictions_path, keep_default_na=False)\n    missing = [c for c in CONTEXT_COLUMNS + SCORE_COLUMNS + [\"allele\"] if c not in df.columns]\n    if missing:\n        raise ValueError(\"TF predictions missing required columns: %s\" % missing)\n\n    context_max = df.groupby(CONTEXT_COLUMNS, observed=True)[SCORE_COLUMNS].max()\n    selected_context_mask = (\n        (context_max[\"pres_with_presentation_score\"] > args.high_score_threshold)\n   
     | (context_max[\"pres_without_presentation_score\"] > args.high_score_threshold)\n    )\n    selected_contexts = context_max[selected_context_mask].reset_index()\n    if selected_contexts.empty:\n        raise ValueError(\n            \"No contexts found above high score threshold %.3f\"\n            % args.high_score_threshold\n        )\n\n    selected = df.merge(selected_contexts[CONTEXT_COLUMNS], on=CONTEXT_COLUMNS, how=\"inner\")\n    selected = selected.sort_values(CONTEXT_COLUMNS + [\"allele\"]).reset_index(drop=True)\n\n    expected_allele_count = int(df[\"allele\"].nunique())\n    alleles_per_context = selected.groupby(CONTEXT_COLUMNS, observed=True)[\"allele\"].nunique()\n    if not (alleles_per_context == expected_allele_count).all():\n        bad = alleles_per_context[alleles_per_context != expected_allele_count]\n        raise ValueError(\n            \"Expected %d alleles per selected context; got mismatches for %d contexts.\"\n            % (expected_allele_count, int(bad.shape[0]))\n        )\n\n    low_score_stats = {}\n    for score_col in SCORE_COLUMNS:\n        context_min = selected.groupby(CONTEXT_COLUMNS, observed=True)[score_col].min()\n        contexts_with_low = int((context_min < args.low_score_threshold).sum())\n        low_score_stats[score_col] = {\n            \"contexts_with_low_allele\": contexts_with_low,\n            \"context_count\": int(context_min.shape[0]),\n            \"low_score_threshold\": args.low_score_threshold,\n        }\n        if (\n            contexts_with_low < int(context_min.shape[0])\n            and not args.allow_contexts_without_low_alleles\n        ):\n            raise ValueError(\n                \"Selected contexts do not all include low-scoring alleles for %s \"\n                \"(%d/%d below %.3f).\"\n                % (\n                    score_col,\n                    contexts_with_low,\n                    int(context_min.shape[0]),\n                    args.low_score_threshold,\n                )\n            )\n\n    selected.to_csv(out_csv, index=False, compression=\"gzip\")\n\n    metadata = {\n        \"source_tf_predictions_csv\": str(tf_predictions_path),\n        \"row_count\": int(selected.shape[0]),\n        \"context_count\": int(selected_contexts.shape[0]),\n        \"allele_count\": expected_allele_count,\n        \"high_score_threshold\": float(args.high_score_threshold),\n        \"low_score_threshold\": float(args.low_score_threshold),\n        \"score_columns\": SCORE_COLUMNS,\n        \"low_score_stats\": low_score_stats,\n    }\n    metadata.update(_collect_model_metadata())\n\n    with open(out_metadata, \"w\") as out:\n        json.dump(metadata, out, indent=2, sort_keys=True)\n\n    print(\"Wrote fixture rows:\", selected.shape[0])\n    print(\"Selected contexts:\", selected_contexts.shape[0])\n    print(\"Alleles per context:\", expected_allele_count)\n    print(\"Fixture CSV:\", out_csv)\n    print(\"Fixture metadata:\", out_metadata)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
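  {
    "path": "scripts/fixture_workflow_example.py",
    "content": "\"\"\"\nIllustrative end-to-end sketch (not wired into CI) chaining the cross-allele\nparity run, fixture extraction, and fixture error report. The script names\nand /tmp output locations follow the defaults documented in the sibling\nscripts; point --tf-repo-root at your own TensorFlow-era checkout.\n\nExample:\n  python scripts/fixture_workflow_example.py --tf-repo-root /path/to/tf/mhcflurry\n\"\"\"\n\nfrom __future__ import annotations\n\nimport argparse\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nREPO_ROOT = Path(__file__).resolve().parents[1]\n\n\ndef _run(command: list[str]) -> None:\n    print(\"+\", \" \".join(command))\n    subprocess.run(command, check=True, cwd=REPO_ROOT)\n\n\ndef main() -> None:\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--tf-repo-root\", required=True)\n    parser.add_argument(\"--out-dir\", default=\"/tmp/mhcflurry-cross-allele-parity\")\n    args = parser.parse_args()\n\n    out_dir = Path(args.out_dir)\n\n    # 1. Cross-allele TF vs PyTorch parity run; writes tf_predictions.csv.gz\n    #    (among other outputs) into --out-dir.\n    _run([\n        sys.executable,\n        \"scripts/cross_allele_parity_analysis.py\",\n        \"--tf-repo-root\", args.tf_repo_root,\n        \"--out-dir\", str(out_dir),\n    ])\n\n    # 2. Turn high-presentation TF rows into a release regression fixture.\n    _run([\n        sys.executable,\n        \"scripts/extract_high_presentation_fixture.py\",\n        \"--tf-predictions-csv\", str(out_dir / \"tf_predictions.csv.gz\"),\n        \"--out-csv\", \"test/data/master_released_class1_presentation_highscore_rows.csv.gz\",\n        \"--out-metadata-json\", \"test/data/master_released_class1_presentation_highscore_rows_metadata.json\",\n    ])\n\n    # 3. Compare the current branch's predictors against the cached fixtures.\n    _run([\n        sys.executable,\n        \"scripts/generate_fixture_error_report.py\",\n        \"--out-dir\", \"/tmp/mhcflurry-fixture-error-report\",\n    ])\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },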
  {
    "path": "scripts/generate_fixture_error_report.py",
    "content": "\"\"\"\nGenerate an HTML parity/error report against cached master-release fixtures.\n\nThe report compares the current branch's released affinity and presentation\npredictors against fixture data stored under ``test/data``. It writes a\nself-contained HTML file with inline SVG plots plus CSV/JSON summaries.\n\nExample:\n  ./.venv/bin/python scripts/generate_fixture_error_report.py \\\n    --out-dir /tmp/mhcflurry-fixture-error-report\n\"\"\"\n\nfrom __future__ import annotations\n\nimport argparse\nimport html\nimport json\nimport math\nimport warnings\nfrom dataclasses import dataclass\nfrom datetime import datetime\nfrom pathlib import Path\nfrom typing import Iterable\n\nimport numpy as np\nimport pandas as pd\n\nfrom mhcflurry import Class1AffinityPredictor, Class1PresentationPredictor\nfrom mhcflurry.downloads import (\n    configure,\n    get_current_release,\n    get_default_class1_models_dir,\n    get_default_class1_presentation_models_dir,\n    get_path,\n)\nfrom mhcflurry.testing_utils import cleanup, startup\n\n\nDATA_DIR = Path(__file__).resolve().parents[1] / \"test\" / \"data\"\nAFFINITY_FIXTURE_PATH = DATA_DIR / \"master_released_class1_affinity_predictions.json\"\nPRESENTATION_FIXTURE_PATH = (\n    DATA_DIR / \"master_released_class1_presentation_highscore_rows.csv.gz\"\n)\nPRESENTATION_METADATA_PATH = (\n    DATA_DIR / \"master_released_class1_presentation_highscore_rows_metadata.json\"\n)\nBASE_COLUMNS = [\"row_id\", \"peptide\", \"allele\", \"n_flank\", \"c_flank\"]\nSVG_NS = \"http://www.w3.org/2000/svg\"\n\n\n@dataclass\nclass MetricReport:\n    key: str\n    title: str\n    section: str\n    unit: str\n    df: pd.DataFrame\n    reference_label: str\n    current_label: str\n    log_scale: bool = False\n    use_relative_histogram: bool = False\n\n    @property\n    def summary(self) -> dict:\n        error = self.df[\"error\"].to_numpy(dtype=np.float64)\n        abs_error = self.df[\"abs_error\"].to_numpy(dtype=np.float64)\n        result = {\n            \"count\": int(len(self.df)),\n            \"mean_error\": float(error.mean()) if len(error) else float(\"nan\"),\n            \"mean_abs_error\": float(abs_error.mean()) if len(abs_error) else float(\"nan\"),\n            \"rmse\": float(np.sqrt(np.mean(np.square(error)))) if len(error) else float(\"nan\"),\n            \"max_abs_error\": float(abs_error.max()) if len(abs_error) else float(\"nan\"),\n        }\n        if \"abs_pct_error\" in self.df.columns:\n            abs_pct = self.df[\"abs_pct_error\"].to_numpy(dtype=np.float64)\n            result[\"mean_abs_pct_error\"] = float(abs_pct.mean()) if len(abs_pct) else float(\"nan\")\n            result[\"max_abs_pct_error\"] = float(abs_pct.max()) if len(abs_pct) else float(\"nan\")\n        return result\n\n\ndef build_parser() -> argparse.ArgumentParser:\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\n        \"--out-dir\",\n        default=\"/tmp/mhcflurry-fixture-error-report\",\n        help=\"Directory to receive the HTML report, CSVs, and summary JSON.\",\n    )\n    return parser\n\n\ndef _format_number(value: float, digits: int = 6) -> str:\n    if value is None or (isinstance(value, float) and not math.isfinite(value)):\n        return \"nan\"\n    abs_value = abs(value)\n    if abs_value == 0.0:\n        return \"0\"\n    if abs_value >= 1e4 or abs_value < 1e-4:\n        return f\"{value:.3e}\"\n    return f\"{value:.{digits}f}\"\n\n\ndef _format_percent(value: float) -> str:\n    if value is None or 
(isinstance(value, float) and not math.isfinite(value)):\n        return \"nan\"\n    return f\"{value:.3e}%\"\n\n\ndef _as_float_array(values: Iterable[float]) -> np.ndarray:\n    return np.asarray(list(values), dtype=np.float64)\n\n\ndef _clip_positive(values: np.ndarray) -> np.ndarray:\n    positive = values[values > 0]\n    floor = float(positive.min()) if positive.size else 1e-12\n    return np.clip(values, floor, None)\n\n\ndef _make_error_frame(\n    base_df: pd.DataFrame,\n    reference_column: str,\n    current_column: str,\n    extra_columns: list[str] | None = None,\n) -> pd.DataFrame:\n    extra_columns = extra_columns or []\n    keep_columns = [c for c in BASE_COLUMNS if c in base_df.columns] + extra_columns\n    result = base_df[keep_columns].copy()\n    result[\"reference\"] = base_df[reference_column].to_numpy(dtype=np.float64)\n    result[\"current\"] = base_df[current_column].to_numpy(dtype=np.float64)\n    result[\"error\"] = result[\"current\"] - result[\"reference\"]\n    result[\"abs_error\"] = result[\"error\"].abs()\n    positive_ref = np.clip(np.abs(result[\"reference\"]), 1e-12, None)\n    result[\"pct_error\"] = 100.0 * result[\"error\"] / positive_ref\n    result[\"abs_pct_error\"] = result[\"pct_error\"].abs()\n    return result.sort_values(\"abs_error\", ascending=False).reset_index(drop=True)\n\n\ndef _load_affinity_fixture() -> dict:\n    with AFFINITY_FIXTURE_PATH.open(\"r\") as handle:\n        return json.load(handle)\n\n\ndef _load_presentation_fixture() -> tuple[pd.DataFrame, dict]:\n    fixture_df = pd.read_csv(PRESENTATION_FIXTURE_PATH, keep_default_na=False)\n    with PRESENTATION_METADATA_PATH.open(\"r\") as handle:\n        metadata = json.load(handle)\n    return fixture_df, metadata\n\n\ndef _predict_current_outputs() -> tuple[dict, pd.DataFrame, dict]:\n    configure()\n    default_affinity = Class1AffinityPredictor.load(get_default_class1_models_dir())\n    allele_specific = Class1AffinityPredictor.load(get_path(\"models_class1\", \"models\"))\n    pan = Class1AffinityPredictor.load(get_path(\"models_class1_pan\", \"models.combined\"))\n    presentation_predictor = Class1PresentationPredictor.load(\n        get_default_class1_presentation_models_dir()\n    )\n\n    affinity_fixture = _load_affinity_fixture()\n    fixture_df, presentation_metadata = _load_presentation_fixture()\n\n    spec_fx = affinity_fixture[\"allele_specific\"]\n    pan_fx = affinity_fixture[\"pan_allele\"]\n    spec_current = allele_specific.predict(\n        peptides=spec_fx[\"peptides\"],\n        alleles=spec_fx[\"alleles\"],\n    )\n    pan_current = pan.predict(\n        peptides=pan_fx[\"peptides\"],\n        alleles=pan_fx[\"alleles\"],\n    )\n\n    spec_df = pd.DataFrame(\n        {\n            \"peptide\": spec_fx[\"peptides\"],\n            \"allele\": spec_fx[\"alleles\"],\n            \"reference\": np.asarray(spec_fx[\"predictions\"], dtype=np.float64),\n            \"current\": np.asarray(spec_current, dtype=np.float64),\n        }\n    )\n    spec_df[\"error\"] = spec_df[\"current\"] - spec_df[\"reference\"]\n    spec_df[\"abs_error\"] = spec_df[\"error\"].abs()\n    spec_df[\"pct_error\"] = 100.0 * spec_df[\"error\"] / np.clip(\n        spec_df[\"reference\"].abs(), 1e-12, None\n    )\n    spec_df[\"abs_pct_error\"] = spec_df[\"pct_error\"].abs()\n    spec_df = spec_df.sort_values(\"abs_error\", ascending=False).reset_index(drop=True)\n\n    pan_df = pd.DataFrame(\n        {\n            \"peptide\": pan_fx[\"peptides\"],\n            \"allele\": 
pan_fx[\"alleles\"],\n            \"reference\": np.asarray(pan_fx[\"predictions\"], dtype=np.float64),\n            \"current\": np.asarray(pan_current, dtype=np.float64),\n        }\n    )\n    pan_df[\"error\"] = pan_df[\"current\"] - pan_df[\"reference\"]\n    pan_df[\"abs_error\"] = pan_df[\"error\"].abs()\n    pan_df[\"pct_error\"] = 100.0 * pan_df[\"error\"] / np.clip(\n        pan_df[\"reference\"].abs(), 1e-12, None\n    )\n    pan_df[\"abs_pct_error\"] = pan_df[\"pct_error\"].abs()\n    pan_df = pan_df.sort_values(\"abs_error\", ascending=False).reset_index(drop=True)\n\n    peptides = fixture_df[\"peptide\"].tolist()\n    alleles = fixture_df[\"allele\"].tolist()\n    n_flanks = fixture_df[\"n_flank\"].tolist()\n    c_flanks = fixture_df[\"c_flank\"].tolist()\n\n    with warnings.catch_warnings():\n        warnings.filterwarnings(\n            \"ignore\",\n            message=r\".*Downcasting behavior in `replace` is deprecated.*\",\n            category=FutureWarning,\n        )\n        affinity_df = default_affinity.predict_to_dataframe(\n            peptides=peptides,\n            alleles=alleles,\n            throw=False,\n            include_percentile_ranks=True,\n            include_confidence_intervals=True,\n            centrality_measure=\"mean\",\n            model_kwargs={\"batch_size\": 4096},\n        )\n\n    sample_names = alleles\n    allele_map = {allele: [allele] for allele in sorted(set(alleles))}\n    with warnings.catch_warnings():\n        warnings.filterwarnings(\n            \"ignore\",\n            message=r\".*Downcasting behavior in `replace` is deprecated.*\",\n            category=FutureWarning,\n        )\n        pres_with_df = presentation_predictor.predict(\n            peptides=peptides,\n            alleles=allele_map,\n            sample_names=sample_names,\n            n_flanks=n_flanks,\n            c_flanks=c_flanks,\n            include_affinity_percentile=True,\n            verbose=0,\n            throw=True,\n        ).sort_values(\"peptide_num\")\n        pres_without_df = presentation_predictor.predict(\n            peptides=peptides,\n            alleles=allele_map,\n            sample_names=sample_names,\n            n_flanks=None,\n            c_flanks=None,\n            include_affinity_percentile=True,\n            verbose=0,\n            throw=True,\n        ).sort_values(\"peptide_num\")\n\n    predicted = fixture_df[BASE_COLUMNS].copy()\n    predicted[\"affinity_prediction_current\"] = affinity_df[\"prediction\"].to_numpy(\n        dtype=np.float64\n    )\n    predicted[\"processing_with_score_current\"] = pres_with_df[\"processing_score\"].to_numpy(\n        dtype=np.float64\n    )\n    predicted[\n        \"pres_with_presentation_score_current\"\n    ] = pres_with_df[\"presentation_score\"].to_numpy(dtype=np.float64)\n    predicted[\"processing_without_score_current\"] = pres_without_df[\n        \"processing_score\"\n    ].to_numpy(dtype=np.float64)\n    predicted[\n        \"pres_without_presentation_score_current\"\n    ] = pres_without_df[\"presentation_score\"].to_numpy(dtype=np.float64)\n\n    current_metadata = {\n        \"release\": get_current_release(),\n        \"presentation_provenance\": presentation_predictor.provenance_string,\n        \"presentation_internal_affinity_provenance\": (\n            presentation_predictor.affinity_predictor.provenance_string\n        ),\n    }\n    combined_metadata = {\n        \"affinity_fixture_release\": affinity_fixture.get(\"release\"),\n        \"presentation_fixture\": 
presentation_metadata,\n        \"current\": current_metadata,\n    }\n    return {\n        \"released_affinity_allele_specific\": spec_df,\n        \"released_affinity_pan_allele\": pan_df,\n        \"presentation_fixture_predictions\": predicted,\n    }, fixture_df, combined_metadata\n\n\ndef _compute_metric_reports(\n    current_outputs: dict,\n    fixture_df: pd.DataFrame,\n) -> list[MetricReport]:\n    predicted = current_outputs[\"presentation_fixture_predictions\"]\n    combined = fixture_df[BASE_COLUMNS].copy()\n    for column in [\n        \"affinity_prediction\",\n        \"processing_with_score\",\n        \"pres_with_presentation_score\",\n        \"processing_without_score\",\n        \"pres_without_presentation_score\",\n    ]:\n        combined[column] = fixture_df[column]\n    for column in predicted.columns:\n        if column.endswith(\"_current\"):\n            combined[column] = predicted[column]\n\n    return [\n        MetricReport(\n            key=\"released_affinity_allele_specific\",\n            title=\"Released affinity parity: allele-specific models\",\n            section=\"Affinity\",\n            unit=\"nM\",\n            df=current_outputs[\"released_affinity_allele_specific\"],\n            reference_label=\"master fixture affinity\",\n            current_label=\"current branch affinity\",\n            log_scale=True,\n            use_relative_histogram=True,\n        ),\n        MetricReport(\n            key=\"released_affinity_pan_allele\",\n            title=\"Released affinity parity: pan-allele models\",\n            section=\"Affinity\",\n            unit=\"nM\",\n            df=current_outputs[\"released_affinity_pan_allele\"],\n            reference_label=\"master fixture affinity\",\n            current_label=\"current branch affinity\",\n            log_scale=True,\n            use_relative_histogram=True,\n        ),\n        MetricReport(\n            key=\"presentation_fixture_affinity\",\n            title=\"Presentation fixture affinity\",\n            section=\"Affinity\",\n            unit=\"nM\",\n            df=_make_error_frame(\n                combined,\n                \"affinity_prediction\",\n                \"affinity_prediction_current\",\n            ),\n            reference_label=\"master fixture affinity\",\n            current_label=\"current branch affinity\",\n            log_scale=True,\n            use_relative_histogram=True,\n        ),\n        MetricReport(\n            key=\"processing_with_score\",\n            title=\"Processing score with flanks\",\n            section=\"Processing\",\n            unit=\"score\",\n            df=_make_error_frame(\n                combined,\n                \"processing_with_score\",\n                \"processing_with_score_current\",\n            ),\n            reference_label=\"master fixture processing score\",\n            current_label=\"current branch processing score\",\n        ),\n        MetricReport(\n            key=\"processing_without_score\",\n            title=\"Processing score without flanks\",\n            section=\"Processing\",\n            unit=\"score\",\n            df=_make_error_frame(\n                combined,\n                \"processing_without_score\",\n                \"processing_without_score_current\",\n            ),\n            reference_label=\"master fixture processing score\",\n            current_label=\"current branch processing score\",\n        ),\n        MetricReport(\n            key=\"presentation_with_score\",\n            
title=\"Presentation score with flanks\",\n            section=\"Presentation\",\n            unit=\"score\",\n            df=_make_error_frame(\n                combined,\n                \"pres_with_presentation_score\",\n                \"pres_with_presentation_score_current\",\n            ),\n            reference_label=\"master fixture presentation score\",\n            current_label=\"current branch presentation score\",\n        ),\n        MetricReport(\n            key=\"presentation_without_score\",\n            title=\"Presentation score without flanks\",\n            section=\"Presentation\",\n            unit=\"score\",\n            df=_make_error_frame(\n                combined,\n                \"pres_without_presentation_score\",\n                \"pres_without_presentation_score_current\",\n            ),\n            reference_label=\"master fixture presentation score\",\n            current_label=\"current branch presentation score\",\n        ),\n    ]\n\n\ndef _axis_range(values: np.ndarray) -> tuple[float, float]:\n    data_min = float(np.min(values))\n    data_max = float(np.max(values))\n    if data_min == data_max:\n        pad = 1.0 if data_min == 0.0 else abs(data_min) * 0.05\n        return data_min - pad, data_max + pad\n    pad = (data_max - data_min) * 0.05\n    return data_min - pad, data_max + pad\n\n\ndef _tick_values(start: float, stop: float, count: int = 5) -> list[float]:\n    if count < 2:\n        return [start]\n    return [start + (stop - start) * i / (count - 1) for i in range(count)]\n\n\ndef _format_tick(value: float, log_scale: bool = False) -> str:\n    if log_scale:\n        return _format_number(10.0 ** value, digits=3)\n    return _format_number(value, digits=3)\n\n\ndef _render_scatter_svg(\n    df: pd.DataFrame,\n    title: str,\n    log_scale: bool,\n    x_label: str,\n    y_label: str,\n) -> str:\n    width = 520\n    height = 360\n    margin_left = 70\n    margin_right = 24\n    margin_top = 36\n    margin_bottom = 58\n    plot_width = width - margin_left - margin_right\n    plot_height = height - margin_top - margin_bottom\n\n    x = df[\"reference\"].to_numpy(dtype=np.float64)\n    y = df[\"current\"].to_numpy(dtype=np.float64)\n    if log_scale:\n        x = np.log10(_clip_positive(x))\n        y = np.log10(_clip_positive(y))\n\n    combined = np.concatenate([x, y])\n    x_min, x_max = _axis_range(combined)\n    y_min, y_max = x_min, x_max\n\n    def scale_x(value: float) -> float:\n        return margin_left + (value - x_min) / (x_max - x_min) * plot_width\n\n    def scale_y(value: float) -> float:\n        return margin_top + plot_height - (value - y_min) / (y_max - y_min) * plot_height\n\n    parts = [\n        f'<svg xmlns=\"{SVG_NS}\" width=\"{width}\" height=\"{height}\" viewBox=\"0 0 {width} {height}\" role=\"img\" aria-label=\"{html.escape(title)}\">',\n        f'<rect x=\"0\" y=\"0\" width=\"{width}\" height=\"{height}\" fill=\"#fffdfa\" rx=\"18\" />',\n        f'<text x=\"{margin_left}\" y=\"22\" fill=\"#1f2937\" font-size=\"16\" font-weight=\"700\">{html.escape(title)}</text>',\n    ]\n\n    for tick in _tick_values(x_min, x_max):\n        x_pos = scale_x(tick)\n        y_pos = scale_y(tick)\n        parts.append(\n            f'<line x1=\"{x_pos:.2f}\" y1=\"{margin_top}\" x2=\"{x_pos:.2f}\" y2=\"{margin_top + plot_height}\" stroke=\"#ece4d8\" stroke-width=\"1\" />'\n        )\n        parts.append(\n            f'<line x1=\"{margin_left}\" y1=\"{y_pos:.2f}\" x2=\"{margin_left + plot_width}\" y2=\"{y_pos:.2f}\" 
stroke=\"#ece4d8\" stroke-width=\"1\" />'\n        )\n        tick_label = html.escape(_format_tick(tick, log_scale=log_scale))\n        parts.append(\n            f'<text x=\"{x_pos:.2f}\" y=\"{height - 18}\" fill=\"#6b7280\" font-size=\"11\" text-anchor=\"middle\">{tick_label}</text>'\n        )\n        parts.append(\n            f'<text x=\"56\" y=\"{y_pos + 4:.2f}\" fill=\"#6b7280\" font-size=\"11\" text-anchor=\"end\">{tick_label}</text>'\n        )\n\n    parts.append(\n        f'<line x1=\"{margin_left}\" y1=\"{margin_top + plot_height}\" x2=\"{margin_left + plot_width}\" y2=\"{margin_top}\" stroke=\"#9ca3af\" stroke-width=\"2\" stroke-dasharray=\"6 5\" />'\n    )\n    parts.append(\n        f'<rect x=\"{margin_left}\" y=\"{margin_top}\" width=\"{plot_width}\" height=\"{plot_height}\" fill=\"none\" stroke=\"#9a8f83\" stroke-width=\"1.5\" />'\n    )\n\n    for x_val, y_val in zip(x, y):\n        parts.append(\n            f'<circle cx=\"{scale_x(float(x_val)):.2f}\" cy=\"{scale_y(float(y_val)):.2f}\" r=\"4.2\" fill=\"#0f766e\" fill-opacity=\"0.8\" />'\n        )\n\n    parts.append(\n        f'<text x=\"{margin_left + plot_width / 2:.2f}\" y=\"{height - 4}\" fill=\"#374151\" font-size=\"12\" text-anchor=\"middle\">{html.escape(x_label)}</text>'\n    )\n    parts.append(\n        f'<text x=\"18\" y=\"{margin_top + plot_height / 2:.2f}\" fill=\"#374151\" font-size=\"12\" text-anchor=\"middle\" transform=\"rotate(-90 18 {margin_top + plot_height / 2:.2f})\">{html.escape(y_label)}</text>'\n    )\n    parts.append(\"</svg>\")\n    return \"\".join(parts)\n\n\ndef _render_histogram_svg(\n    values: np.ndarray,\n    title: str,\n    x_label: str,\n    color: str,\n) -> str:\n    width = 520\n    height = 280\n    margin_left = 70\n    margin_right = 24\n    margin_top = 36\n    margin_bottom = 58\n    plot_width = width - margin_left - margin_right\n    plot_height = height - margin_top - margin_bottom\n\n    finite_values = values[np.isfinite(values)]\n    if finite_values.size == 0:\n        finite_values = np.array([0.0], dtype=np.float64)\n\n    bound = float(np.max(np.abs(finite_values)))\n    if bound == 0.0:\n        bound = 1e-12\n    bins = min(24, max(8, int(math.sqrt(finite_values.size)) * 2))\n    hist, edges = np.histogram(finite_values, bins=bins, range=(-bound, bound))\n    y_max = max(int(hist.max()), 1)\n\n    def scale_x(value: float) -> float:\n        return margin_left + (value + bound) / (2.0 * bound) * plot_width\n\n    def scale_y(value: float) -> float:\n        return margin_top + plot_height - value / y_max * plot_height\n\n    bar_width = plot_width / bins\n    parts = [\n        f'<svg xmlns=\"{SVG_NS}\" width=\"{width}\" height=\"{height}\" viewBox=\"0 0 {width} {height}\" role=\"img\" aria-label=\"{html.escape(title)}\">',\n        f'<rect x=\"0\" y=\"0\" width=\"{width}\" height=\"{height}\" fill=\"#fffdfa\" rx=\"18\" />',\n        f'<text x=\"{margin_left}\" y=\"22\" fill=\"#1f2937\" font-size=\"16\" font-weight=\"700\">{html.escape(title)}</text>',\n    ]\n\n    for tick in _tick_values(-bound, bound):\n        x_pos = scale_x(tick)\n        parts.append(\n            f'<line x1=\"{x_pos:.2f}\" y1=\"{margin_top}\" x2=\"{x_pos:.2f}\" y2=\"{margin_top + plot_height}\" stroke=\"#ece4d8\" stroke-width=\"1\" />'\n        )\n        parts.append(\n            f'<text x=\"{x_pos:.2f}\" y=\"{height - 18}\" fill=\"#6b7280\" font-size=\"11\" text-anchor=\"middle\">{html.escape(_format_number(tick, digits=3))}</text>'\n        )\n\n    for tick in _tick_values(0.0, 
float(y_max), count=5):\n        y_pos = scale_y(tick)\n        parts.append(\n            f'<line x1=\"{margin_left}\" y1=\"{y_pos:.2f}\" x2=\"{margin_left + plot_width}\" y2=\"{y_pos:.2f}\" stroke=\"#ece4d8\" stroke-width=\"1\" />'\n        )\n        parts.append(\n            f'<text x=\"56\" y=\"{y_pos + 4:.2f}\" fill=\"#6b7280\" font-size=\"11\" text-anchor=\"end\">{int(round(tick))}</text>'\n        )\n\n    parts.append(\n        f'<rect x=\"{margin_left}\" y=\"{margin_top}\" width=\"{plot_width}\" height=\"{plot_height}\" fill=\"none\" stroke=\"#9a8f83\" stroke-width=\"1.5\" />'\n    )\n    for i, count in enumerate(hist):\n        x_pos = margin_left + i * bar_width + 1.5\n        y_pos = scale_y(float(count))\n        bar_height = margin_top + plot_height - y_pos\n        parts.append(\n            f'<rect x=\"{x_pos:.2f}\" y=\"{y_pos:.2f}\" width=\"{max(bar_width - 3.0, 1.0):.2f}\" height=\"{bar_height:.2f}\" fill=\"{color}\" fill-opacity=\"0.85\" />'\n        )\n\n    zero_x = scale_x(0.0)\n    parts.append(\n        f'<line x1=\"{zero_x:.2f}\" y1=\"{margin_top}\" x2=\"{zero_x:.2f}\" y2=\"{margin_top + plot_height}\" stroke=\"#111827\" stroke-width=\"1.5\" stroke-dasharray=\"4 4\" />'\n    )\n    parts.append(\n        f'<text x=\"{margin_left + plot_width / 2:.2f}\" y=\"{height - 4}\" fill=\"#374151\" font-size=\"12\" text-anchor=\"middle\">{html.escape(x_label)}</text>'\n    )\n    parts.append(\n        f'<text x=\"18\" y=\"{margin_top + plot_height / 2:.2f}\" fill=\"#374151\" font-size=\"12\" text-anchor=\"middle\" transform=\"rotate(-90 18 {margin_top + plot_height / 2:.2f})\">count</text>'\n    )\n    parts.append(\"</svg>\")\n    return \"\".join(parts)\n\n\ndef _render_summary_table(reports: list[MetricReport]) -> str:\n    rows = []\n    for report in reports:\n        summary = report.summary\n        mean_abs_pct = summary.get(\"mean_abs_pct_error\")\n        max_abs_pct = summary.get(\"max_abs_pct_error\")\n        rows.append(\n            \"<tr>\"\n            f\"<td>{html.escape(report.section)}</td>\"\n            f\"<td>{html.escape(report.title)}</td>\"\n            f\"<td>{summary['count']}</td>\"\n            f\"<td>{html.escape(_format_number(summary['mean_abs_error']))}</td>\"\n            f\"<td>{html.escape(_format_number(summary['max_abs_error']))}</td>\"\n            f\"<td>{html.escape(_format_number(summary['rmse']))}</td>\"\n            f\"<td>{html.escape(_format_percent(mean_abs_pct)) if mean_abs_pct is not None else '-'}</td>\"\n            f\"<td>{html.escape(_format_percent(max_abs_pct)) if max_abs_pct is not None else '-'}</td>\"\n            \"</tr>\"\n        )\n    return (\n        \"<table class='summary-table'>\"\n        \"<thead><tr><th>Section</th><th>Metric</th><th>N</th>\"\n        \"<th>Mean abs error</th><th>Max abs error</th><th>RMSE</th>\"\n        \"<th>Mean abs pct error</th><th>Max abs pct error</th></tr></thead>\"\n        \"<tbody>\"\n        + \"\".join(rows)\n        + \"</tbody></table>\"\n    )\n\n\ndef _render_top_error_table(report: MetricReport, limit: int = 10) -> str:\n    columns = [c for c in [\"row_id\", \"peptide\", \"allele\", \"n_flank\", \"c_flank\"] if c in report.df]\n    columns += [\"reference\", \"current\", \"error\", \"abs_error\"]\n    if \"abs_pct_error\" in report.df.columns:\n        columns.append(\"abs_pct_error\")\n    subset = report.df[columns].head(limit)\n    header = \"\".join(f\"<th>{html.escape(col)}</th>\" for col in subset.columns)\n    body_rows = []\n    for _, row in 
subset.iterrows():\n        cells = []\n        for col in subset.columns:\n            value = row[col]\n            if isinstance(value, (float, np.floating)):\n                if col == \"abs_pct_error\":\n                    formatted = _format_percent(float(value))\n                else:\n                    formatted = _format_number(float(value))\n            else:\n                formatted = str(value)\n            cells.append(f\"<td>{html.escape(formatted)}</td>\")\n        body_rows.append(\"<tr>\" + \"\".join(cells) + \"</tr>\")\n    return (\n        \"<table class='detail-table'>\"\n        f\"<thead><tr>{header}</tr></thead>\"\n        \"<tbody>\"\n        + \"\".join(body_rows)\n        + \"</tbody></table>\"\n    )\n\n\ndef _render_metric_section(report: MetricReport) -> str:\n    hist_values = (\n        report.df[\"pct_error\"].to_numpy(dtype=np.float64)\n        if report.use_relative_histogram\n        else report.df[\"error\"].to_numpy(dtype=np.float64)\n    )\n    hist_label = \"percent error (%)\" if report.use_relative_histogram else \"signed error\"\n    scatter_svg = _render_scatter_svg(\n        report.df,\n        title=report.title,\n        log_scale=report.log_scale,\n        x_label=report.reference_label + (\" (log10)\" if report.log_scale else \"\"),\n        y_label=report.current_label + (\" (log10)\" if report.log_scale else \"\"),\n    )\n    hist_svg = _render_histogram_svg(\n        hist_values,\n        title=report.title + \" error distribution\",\n        x_label=hist_label,\n        color=\"#c2410c\" if report.use_relative_histogram else \"#2563eb\",\n    )\n    summary = report.summary\n    metric_blurb = (\n        f\"N={summary['count']}, mean abs error={_format_number(summary['mean_abs_error'])} {report.unit}, \"\n        f\"max abs error={_format_number(summary['max_abs_error'])} {report.unit}, \"\n        f\"RMSE={_format_number(summary['rmse'])} {report.unit}.\"\n    )\n    if report.use_relative_histogram:\n        metric_blurb += (\n            \" Mean abs pct error=\"\n            + _format_percent(summary[\"mean_abs_pct_error\"])\n            + \", max abs pct error=\"\n            + _format_percent(summary[\"max_abs_pct_error\"])\n            + \".\"\n        )\n    return (\n        \"<section class='metric-section'>\"\n        f\"<h3>{html.escape(report.title)}</h3>\"\n        f\"<p class='metric-blurb'>{html.escape(metric_blurb)}</p>\"\n        \"<div class='plot-grid'>\"\n        f\"<div class='plot-card'>{scatter_svg}</div>\"\n        f\"<div class='plot-card'>{hist_svg}</div>\"\n        \"</div>\"\n        \"<div class='table-wrap'>\"\n        \"<h4>Largest absolute errors</h4>\"\n        f\"{_render_top_error_table(report)}\"\n        \"</div>\"\n        \"</section>\"\n    )\n\n\ndef _write_outputs(\n    out_dir: Path,\n    reports: list[MetricReport],\n    metadata: dict,\n) -> None:\n    out_dir.mkdir(parents=True, exist_ok=True)\n    csv_dir = out_dir / \"csv\"\n    csv_dir.mkdir(parents=True, exist_ok=True)\n\n    summary_json = {\n        \"generated_at\": datetime.now().isoformat(),\n        \"metadata\": metadata,\n        \"metrics\": {report.key: report.summary for report in reports},\n    }\n    for report in reports:\n        report.df.to_csv(csv_dir / f\"{report.key}.csv\", index=False)\n\n    with (out_dir / \"summary.json\").open(\"w\") as handle:\n        json.dump(summary_json, handle, indent=2, sort_keys=True)\n\n    sections = []\n    grouped_reports = {\n        \"Affinity\": [r for r in reports if 
r.section == \"Affinity\"],\n        \"Processing\": [r for r in reports if r.section == \"Processing\"],\n        \"Presentation\": [r for r in reports if r.section == \"Presentation\"],\n    }\n    for section_name, section_reports in grouped_reports.items():\n        sections.append(\n            f\"<section class='major-section'><h2>{html.escape(section_name)}</h2>\"\n            + \"\".join(_render_metric_section(report) for report in section_reports)\n            + \"</section>\"\n        )\n\n    notes = []\n    affinity_fixture_release = metadata.get(\"affinity_fixture_release\")\n    current_release = metadata[\"current\"][\"release\"]\n    if affinity_fixture_release and affinity_fixture_release != current_release:\n        notes.append(\n            \"Affinity fixture release \"\n            + str(affinity_fixture_release)\n            + \" does not match current downloads release \"\n            + str(current_release)\n            + \".\"\n        )\n    presentation_fixture = metadata.get(\"presentation_fixture\", {})\n    if (\n        presentation_fixture.get(\"release\")\n        and presentation_fixture.get(\"release\") != current_release\n    ):\n        notes.append(\n            \"Presentation fixture release \"\n            + str(presentation_fixture.get(\"release\"))\n            + \" does not match current downloads release \"\n            + str(current_release)\n            + \".\"\n        )\n    if (\n        presentation_fixture.get(\"presentation_provenance\")\n        and presentation_fixture.get(\"presentation_provenance\")\n        != metadata[\"current\"][\"presentation_provenance\"]\n    ):\n        notes.append(\"Presentation predictor provenance differs from fixture metadata.\")\n    if (\n        presentation_fixture.get(\"presentation_internal_affinity_provenance\")\n        and presentation_fixture.get(\"presentation_internal_affinity_provenance\")\n        != metadata[\"current\"][\"presentation_internal_affinity_provenance\"]\n    ):\n        notes.append(\"Internal affinity provenance differs from fixture metadata.\")\n    note_html = (\n        \"<div class='notes'>\"\n        + \"\".join(f\"<p>{html.escape(note)}</p>\" for note in notes)\n        + \"</div>\"\n        if notes\n        else \"<div class='notes ok'><p>Fixture metadata matches the current downloaded release and predictor provenance.</p></div>\"\n    )\n\n    html_text = f\"\"\"<!doctype html>\n<html lang=\"en\">\n<head>\n  <meta charset=\"utf-8\" />\n  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n  <title>MHCflurry fixture error report</title>\n  <style>\n    :root {{\n      --bg: #f5efe3;\n      --panel: #fffdfa;\n      --ink: #1f2937;\n      --muted: #6b7280;\n      --line: #d6c7b5;\n      --accent: #0f766e;\n      --warn: #b45309;\n    }}\n    body {{\n      margin: 0;\n      font-family: \"Iowan Old Style\", \"Palatino Linotype\", \"Book Antiqua\", serif;\n      color: var(--ink);\n      background:\n        radial-gradient(circle at top left, rgba(15, 118, 110, 0.12), transparent 30%),\n        radial-gradient(circle at top right, rgba(194, 65, 12, 0.10), transparent 28%),\n        var(--bg);\n    }}\n    main {{\n      max-width: 1240px;\n      margin: 0 auto;\n      padding: 32px 24px 56px;\n    }}\n    h1, h2, h3, h4 {{\n      margin: 0 0 12px;\n      font-family: \"Avenir Next Condensed\", \"Gill Sans\", sans-serif;\n      letter-spacing: 0.02em;\n    }}\n    h1 {{\n      font-size: 40px;\n      line-height: 1;\n      margin-bottom: 10px;\n    
}}\n    p {{\n      margin: 0 0 12px;\n      line-height: 1.5;\n    }}\n    .hero {{\n      background: linear-gradient(135deg, rgba(255,253,250,0.96), rgba(254,245,231,0.95));\n      border: 1px solid var(--line);\n      border-radius: 24px;\n      padding: 28px 28px 22px;\n      box-shadow: 0 16px 48px rgba(60, 49, 33, 0.08);\n    }}\n    .meta {{\n      color: var(--muted);\n      font-size: 14px;\n    }}\n    .notes {{\n      margin-top: 18px;\n      background: rgba(180, 83, 9, 0.08);\n      border: 1px solid rgba(180, 83, 9, 0.20);\n      border-radius: 16px;\n      padding: 14px 16px 2px;\n    }}\n    .notes.ok {{\n      background: rgba(15, 118, 110, 0.08);\n      border-color: rgba(15, 118, 110, 0.20);\n    }}\n    .summary-table, .detail-table {{\n      width: 100%;\n      border-collapse: collapse;\n      background: var(--panel);\n      border: 1px solid var(--line);\n      border-radius: 18px;\n      overflow: hidden;\n      font-size: 14px;\n    }}\n    .summary-table th, .summary-table td,\n    .detail-table th, .detail-table td {{\n      padding: 10px 12px;\n      border-bottom: 1px solid #efe4d6;\n      text-align: left;\n      vertical-align: top;\n    }}\n    .summary-table th, .detail-table th {{\n      background: #f9f1e5;\n      font-family: \"Avenir Next Condensed\", \"Gill Sans\", sans-serif;\n      font-size: 13px;\n      text-transform: uppercase;\n      letter-spacing: 0.05em;\n    }}\n    .major-section {{\n      margin-top: 34px;\n    }}\n    .metric-section {{\n      margin-top: 18px;\n      background: rgba(255,253,250,0.72);\n      border: 1px solid var(--line);\n      border-radius: 22px;\n      padding: 18px;\n      box-shadow: 0 12px 32px rgba(60, 49, 33, 0.06);\n    }}\n    .metric-blurb {{\n      color: var(--muted);\n    }}\n    .plot-grid {{\n      display: grid;\n      grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));\n      gap: 16px;\n      margin: 16px 0 18px;\n    }}\n    .plot-card {{\n      background: var(--panel);\n      border: 1px solid var(--line);\n      border-radius: 20px;\n      padding: 10px;\n      overflow-x: auto;\n    }}\n    .table-wrap {{\n      overflow-x: auto;\n    }}\n    code {{\n      background: rgba(15, 23, 42, 0.06);\n      border-radius: 6px;\n      padding: 1px 6px;\n      font-size: 0.95em;\n    }}\n    @media (max-width: 720px) {{\n      main {{\n        padding: 18px 12px 32px;\n      }}\n      .hero {{\n        padding: 20px 18px 16px;\n      }}\n      h1 {{\n        font-size: 32px;\n      }}\n    }}\n  </style>\n</head>\n<body>\n  <main>\n    <section class=\"hero\">\n      <h1>MHCflurry Fixture Error Report</h1>\n      <p>This report compares cached master-branch fixtures in <code>test/data</code> against predictions from the current branch's downloaded released models. Affinity uses the released-model JSON fixture; processing and presentation use the curated high-score presentation CSV fixture.</p>\n      <p class=\"meta\">Generated {html.escape(datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\"))}. Current release: {html.escape(str(current_release))}. 
Output dir: {html.escape(str(out_dir))}.</p>\n      {note_html}\n    </section>\n    <section class=\"major-section\">\n      <h2>Summary</h2>\n      {_render_summary_table(reports)}\n    </section>\n    {''.join(sections)}\n  </main>\n</body>\n</html>\n\"\"\"\n    (out_dir / \"index.html\").write_text(html_text)\n\n\ndef main() -> None:\n    args = build_parser().parse_args()\n    out_dir = Path(args.out_dir).resolve()\n    startup()\n    try:\n        current_outputs, fixture_df, metadata = _predict_current_outputs()\n        reports = _compute_metric_reports(current_outputs, fixture_df)\n        _write_outputs(out_dir, reports, metadata)\n        print(\"Wrote HTML report:\", out_dir / \"index.html\")\n        print(\"Wrote summary JSON:\", out_dir / \"summary.json\")\n        print(\"Wrote CSV directory:\", out_dir / \"csv\")\n    finally:\n        cleanup()\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "scripts/modal_train_mhcflurry.py",
    "content": "\"\"\"\nRun MHCflurry training jobs on Modal.\n\nThis script is intentionally generic: pass any supported training command\ntemplate and run multiple workers in parallel.\n\nExample:\n    modal run scripts/modal_train_mhcflurry.py \\\n      --command-template \"mhcflurry-class1-train-processing-models --data /artifacts/data/train.csv --models-dir /artifacts/runs/{run_name} --verbosity 1\" \\\n      --workers 4\n\nThe command template supports:\n    {run_name}  -> unique run id per worker\n    {worker}    -> worker index (0-based)\n\"\"\"\n\nfrom __future__ import annotations\n\nimport datetime\nimport uuid\n\nimport modal\n\n\nREPO_URL = \"https://github.com/openvax/mhcflurry.git\"\nREPO_REF = \"master\"\nREPO_DIR = \"/workspace/mhcflurry\"\nARTIFACTS_DIR = \"/artifacts\"\nVOLUME_NAME = \"mhcflurry-training\"\n\nALLOWED_TRAINING_COMMANDS = {\n    \"mhcflurry-class1-train-allele-specific-models\",\n    \"mhcflurry-class1-train-pan-allele-models\",\n    \"mhcflurry-class1-train-processing-models\",\n    \"mhcflurry-class1-train-presentation-models\",\n}\n\n\ndef _install_repo():\n    \"\"\"Build helper used at image build time.\"\"\"\n    # Import is intentionally local to keep image build behavior explicit.\n    import subprocess\n\n    subprocess.run(\n        [\n            \"bash\",\n            \"-lc\",\n            (\n                \"set -euxo pipefail; \"\n                f\"git clone --depth 1 --branch {REPO_REF} {REPO_URL} {REPO_DIR}; \"\n                f\"cd {REPO_DIR}; \"\n                \"python -m pip install --upgrade pip; \"\n                \"python -m pip install -e .\"\n            ),\n        ],\n        check=True,\n    )\n\n\nimage = (\n    modal.Image.debian_slim(python_version=\"3.12\")\n    .apt_install(\"git\")\n    .run_function(_install_repo)\n    .env(\n        {\n            # Keep torch memory allocator behavior predictable across jobs.\n            \"PYTORCH_CUDA_ALLOC_CONF\": \"expandable_segments:True\",\n            # Allow override of default downloads location if desired.\n            \"MHCFLURRY_DOWNLOADS_DIR\": f\"{ARTIFACTS_DIR}/downloads\",\n        }\n    )\n)\n\napp = modal.App(\"mhcflurry-train\", image=image)\nvolume = modal.Volume.from_name(VOLUME_NAME, create_if_missing=True)\n\n\n@app.function(\n    gpu=\"A100\",\n    timeout=12 * 60 * 60,\n    cpu=8,\n    memory=32768,\n    volumes={ARTIFACTS_DIR: volume},\n)\ndef run_training_job(job: dict) -> dict:\n    import os\n    import shlex\n    import subprocess\n    import time\n\n    run_name = job[\"run_name\"]\n    command = job[\"command\"]\n    run_dir = os.path.join(ARTIFACTS_DIR, \"runs\", run_name)\n    os.makedirs(run_dir, exist_ok=True)\n\n    argv = shlex.split(command)\n    if not argv:\n        raise ValueError(\"Empty command\")\n    if argv[0] not in ALLOWED_TRAINING_COMMANDS:\n        raise ValueError(\n            \"Unsupported command '%s'. 
Allowed: %s\"\n            % (argv[0], sorted(ALLOWED_TRAINING_COMMANDS))\n        )\n\n    stdout_path = os.path.join(run_dir, \"stdout.log\")\n    stderr_path = os.path.join(run_dir, \"stderr.log\")\n\n    start = time.time()\n    with open(stdout_path, \"w\") as out_f, open(stderr_path, \"w\") as err_f:\n        proc = subprocess.run(\n            argv,\n            cwd=REPO_DIR,\n            stdout=out_f,\n            stderr=err_f,\n            text=True,\n            check=False,\n        )\n    elapsed = time.time() - start\n\n    return {\n        \"run_name\": run_name,\n        \"command\": command,\n        \"exit_code\": proc.returncode,\n        \"elapsed_seconds\": elapsed,\n        \"stdout_path\": stdout_path,\n        \"stderr_path\": stderr_path,\n    }\n\n\n@app.local_entrypoint()\ndef main(command_template: str, workers: int = 1):\n    timestamp = datetime.datetime.utcnow().strftime(\"%Y%m%d-%H%M%S\")\n    jobs = []\n    for worker in range(workers):\n        run_name = f\"{timestamp}-w{worker:03d}-{uuid.uuid4().hex[:8]}\"\n        command = command_template.format(run_name=run_name, worker=worker)\n        jobs.append({\"run_name\": run_name, \"command\": command})\n\n    results = list(run_training_job.map(jobs))\n    results = sorted(results, key=lambda d: d[\"run_name\"])\n\n    for result in results:\n        print(\n            \"%s exit=%s elapsed=%.1fs stdout=%s stderr=%s\"\n            % (\n                result[\"run_name\"],\n                result[\"exit_code\"],\n                result[\"elapsed_seconds\"],\n                result[\"stdout_path\"],\n                result[\"stderr_path\"],\n            )\n        )\n\n"
  },
  {
    "path": "scripts/plot_fixture_diffs.py",
    "content": "#!/usr/bin/env python\n\"\"\"\nCompare current PyTorch predictions against the TF fixture and generate\nper-output figures showing absolute and percentile differences.\n\nUsage:\n    python scripts/plot_fixture_diffs.py [--out-dir /tmp/fixture_diffs]\n\"\"\"\nimport argparse\nimport json\nimport os\nimport sys\nimport warnings\n\nimport numpy as np\nimport pandas as pd\nimport matplotlib\nmatplotlib.use(\"Agg\")\nimport matplotlib.pyplot as plt\n\nfrom mhcflurry import Class1AffinityPredictor, Class1PresentationPredictor\nfrom mhcflurry.downloads import (\n    configure,\n    get_default_class1_models_dir,\n    get_default_class1_presentation_models_dir,\n)\n\nFIXTURE_CSV = os.path.join(\n    os.path.dirname(__file__), os.pardir, \"test\", \"data\",\n    \"master_released_class1_presentation_highscore_rows.csv.gz\",\n)\nFIXTURE_METADATA = os.path.join(\n    os.path.dirname(__file__), os.pardir, \"test\", \"data\",\n    \"master_released_class1_presentation_highscore_rows_metadata.json\",\n)\nBASE_COLUMNS = [\"row_id\", \"peptide\", \"allele\", \"n_flank\", \"c_flank\"]\nSTRING_COLUMNS = [\"pres_with_best_allele\", \"pres_without_best_allele\"]\n\n\ndef load_fixture():\n    df = pd.read_csv(FIXTURE_CSV, keep_default_na=False)\n    with open(FIXTURE_METADATA) as f:\n        metadata = json.load(f)\n    return df, metadata\n\n\ndef generate_predictions(fixture_df):\n    configure()\n    affinity_predictor = Class1AffinityPredictor.load(\n        get_default_class1_models_dir())\n    presentation_predictor = Class1PresentationPredictor.load(\n        get_default_class1_presentation_models_dir())\n\n    peptides = fixture_df[\"peptide\"].tolist()\n    alleles = fixture_df[\"allele\"].tolist()\n    n_flanks = fixture_df[\"n_flank\"].tolist()\n    c_flanks = fixture_df[\"c_flank\"].tolist()\n\n    with warnings.catch_warnings():\n        warnings.simplefilter(\"ignore\")\n        aff_df = affinity_predictor.predict_to_dataframe(\n            peptides=peptides,\n            alleles=alleles,\n            throw=False,\n            include_percentile_ranks=True,\n            include_confidence_intervals=True,\n            centrality_measure=\"mean\",\n            model_kwargs={\"batch_size\": 4096},\n        )\n\n    sample_names = alleles\n    allele_map = {allele: [allele] for allele in sorted(set(alleles))}\n    with warnings.catch_warnings():\n        warnings.simplefilter(\"ignore\")\n        pres_with_df = presentation_predictor.predict(\n            peptides=peptides,\n            alleles=allele_map,\n            sample_names=sample_names,\n            n_flanks=n_flanks,\n            c_flanks=c_flanks,\n            include_affinity_percentile=True,\n            verbose=0,\n            throw=True,\n        ).sort_values(\"peptide_num\")\n        pres_without_df = presentation_predictor.predict(\n            peptides=peptides,\n            alleles=allele_map,\n            sample_names=sample_names,\n            n_flanks=None,\n            c_flanks=None,\n            include_affinity_percentile=True,\n            verbose=0,\n            throw=True,\n        ).sort_values(\"peptide_num\")\n\n    predicted = fixture_df[BASE_COLUMNS].copy()\n    predicted[\"affinity_prediction\"] = aff_df[\"prediction\"].values\n    predicted[\"affinity_prediction_low\"] = aff_df.get(\n        \"prediction_low\", np.nan)\n    predicted[\"affinity_prediction_high\"] = aff_df.get(\n        \"prediction_high\", np.nan)\n    predicted[\"affinity_prediction_percentile\"] = aff_df.get(\n        
\"prediction_percentile\", np.nan)\n\n    predicted[\"pres_with_affinity\"] = pres_with_df[\"affinity\"].values\n    predicted[\"pres_with_best_allele\"] = (\n        pres_with_df[\"best_allele\"].astype(str).values)\n    predicted[\"pres_with_affinity_percentile\"] = (\n        pres_with_df[\"affinity_percentile\"].values)\n    predicted[\"processing_with_score\"] = (\n        pres_with_df[\"processing_score\"].values)\n    predicted[\"pres_with_processing_score\"] = (\n        pres_with_df[\"processing_score\"].values)\n    predicted[\"pres_with_presentation_score\"] = (\n        pres_with_df[\"presentation_score\"].values)\n    predicted[\"pres_with_presentation_percentile\"] = (\n        pres_with_df[\"presentation_percentile\"].values)\n\n    predicted[\"pres_without_affinity\"] = pres_without_df[\"affinity\"].values\n    predicted[\"pres_without_best_allele\"] = (\n        pres_without_df[\"best_allele\"].astype(str).values)\n    predicted[\"pres_without_affinity_percentile\"] = (\n        pres_without_df[\"affinity_percentile\"].values)\n    predicted[\"processing_without_score\"] = (\n        pres_without_df[\"processing_score\"].values)\n    predicted[\"pres_without_processing_score\"] = (\n        pres_without_df[\"processing_score\"].values)\n    predicted[\"pres_without_presentation_score\"] = (\n        pres_without_df[\"presentation_score\"].values)\n    predicted[\"pres_without_presentation_percentile\"] = (\n        pres_without_df[\"presentation_percentile\"].values)\n\n    return predicted\n\n\ndef plot_output(col, tf_vals, pt_vals, out_dir):\n    diff = pt_vals - tf_vals\n    abs_diff = np.abs(diff)\n    pct_diff = np.where(\n        tf_vals != 0,\n        100.0 * abs_diff / np.abs(tf_vals),\n        np.where(abs_diff == 0, 0.0, np.inf),\n    )\n    pct_diff_finite = pct_diff[np.isfinite(pct_diff)]\n\n    fig, axes = plt.subplots(2, 2, figsize=(14, 10))\n    fig.suptitle(col, fontsize=14, fontweight=\"bold\")\n\n    # Top-left: scatter TF vs PyTorch\n    ax = axes[0, 0]\n    ax.scatter(tf_vals, pt_vals, alpha=0.5, s=15, edgecolors=\"none\")\n    lims = [\n        min(tf_vals.min(), pt_vals.min()),\n        max(tf_vals.max(), pt_vals.max()),\n    ]\n    ax.plot(lims, lims, \"r--\", linewidth=0.8, label=\"y = x\")\n    ax.set_xlabel(\"TF (fixture)\")\n    ax.set_ylabel(\"PyTorch (current)\")\n    ax.set_title(\"TF vs PyTorch\")\n    ax.legend(fontsize=8)\n\n    # Top-right: histogram of absolute differences\n    ax = axes[0, 1]\n    ax.hist(diff, bins=50, edgecolor=\"black\", linewidth=0.3)\n    ax.axvline(0, color=\"red\", linestyle=\"--\", linewidth=0.8)\n    ax.set_xlabel(\"Difference (PyTorch − TF)\")\n    ax.set_ylabel(\"Count\")\n    ax.set_title(\n        \"Absolute diff: mean=%.2e, max=%.2e\" % (\n            np.mean(abs_diff), np.max(abs_diff)))\n\n    # Bottom-left: absolute difference by allele (boxplot)\n    ax = axes[1, 0]\n    # Build a dataframe for the boxplot\n    tmp = pd.DataFrame({\"allele\": alleles_global, \"abs_diff\": abs_diff})\n    allele_order = (\n        tmp.groupby(\"allele\")[\"abs_diff\"].median()\n        .sort_values(ascending=False).index.tolist())\n    box_data = [\n        tmp.loc[tmp.allele == a, \"abs_diff\"].values for a in allele_order]\n    if len(allele_order) <= 40:\n        bp = ax.boxplot(box_data, vert=True, patch_artist=True)\n        ax.set_xticks(range(1, len(allele_order) + 1))\n        ax.set_xticklabels(\n            [a.replace(\"HLA-\", \"\") for a in allele_order],\n            rotation=90, fontsize=6)\n        for 
patch in bp[\"boxes\"]:\n            patch.set_facecolor(\"steelblue\")\n            patch.set_alpha(0.7)\n    else:\n        ax.bar(range(len(allele_order)),\n               [np.median(d) for d in box_data], color=\"steelblue\")\n        ax.set_xticks(range(len(allele_order)))\n        ax.set_xticklabels(allele_order, rotation=90, fontsize=5)\n    ax.set_ylabel(\"|Difference|\")\n    ax.set_title(\"Absolute diff by allele\")\n\n    # Bottom-right: histogram of percent differences\n    ax = axes[1, 1]\n    if len(pct_diff_finite) > 0:\n        clip_val = np.percentile(pct_diff_finite, 99)\n        ax.hist(\n            np.clip(pct_diff_finite, 0, clip_val),\n            bins=50, edgecolor=\"black\", linewidth=0.3)\n        ax.set_xlabel(\"Percent difference (%)\")\n        ax.set_ylabel(\"Count\")\n        median_pct = np.median(pct_diff_finite)\n        ax.set_title(\n            \"Pct diff: median=%.4f%%, 99th=%.4f%%\" % (\n                median_pct, clip_val))\n    else:\n        ax.text(0.5, 0.5, \"No finite percent diffs\",\n                ha=\"center\", va=\"center\", transform=ax.transAxes)\n        ax.set_title(\"Percent differences\")\n\n    fig.tight_layout(rect=[0, 0, 1, 0.95])\n    fname = os.path.join(out_dir, \"%s.png\" % col)\n    fig.savefig(fname, dpi=150)\n    plt.close(fig)\n    return {\n        \"column\": col,\n        \"mean_abs_diff\": float(np.mean(abs_diff)),\n        \"max_abs_diff\": float(np.max(abs_diff)),\n        \"median_pct_diff\": float(\n            np.median(pct_diff_finite)) if len(pct_diff_finite) else None,\n        \"p99_pct_diff\": float(\n            np.percentile(pct_diff_finite, 99)) if len(\n                pct_diff_finite) else None,\n    }\n\n\ndef main():\n    parser = argparse.ArgumentParser(description=__doc__)\n    parser.add_argument(\n        \"--out-dir\", default=\"/tmp/fixture_diffs\",\n        help=\"Output directory for figures (default: /tmp/fixture_diffs)\")\n    args = parser.parse_args()\n\n    os.makedirs(args.out_dir, exist_ok=True)\n\n    print(\"Loading fixture...\")\n    fixture_df, metadata = load_fixture()\n    print(\"  %d rows, %d alleles\" % (\n        len(fixture_df), fixture_df.allele.nunique()))\n\n    print(\"Generating PyTorch predictions...\")\n    predicted_df = generate_predictions(fixture_df)\n\n    numeric_columns = [\n        c for c in fixture_df.columns\n        if c not in BASE_COLUMNS + STRING_COLUMNS\n    ]\n\n    global alleles_global\n    alleles_global = fixture_df[\"allele\"].values\n\n    print(\"Plotting %d numeric outputs...\" % len(numeric_columns))\n    summary_rows = []\n    for col in sorted(numeric_columns):\n        tf_vals = fixture_df[col].to_numpy(dtype=np.float64)\n        pt_vals = predicted_df[col].to_numpy(dtype=np.float64)\n        valid = np.isfinite(tf_vals) & np.isfinite(pt_vals)\n        if valid.sum() == 0:\n            print(\"  %s: no valid values, skipping\" % col)\n            continue\n        stats = plot_output(\n            col, tf_vals[valid], pt_vals[valid], args.out_dir)\n        summary_rows.append(stats)\n        print(\"  %s: mean_abs=%.2e, max_abs=%.2e\" % (\n            col, stats[\"mean_abs_diff\"], stats[\"max_abs_diff\"]))\n\n    # String columns: report match rate\n    for col in STRING_COLUMNS:\n        match = (\n            predicted_df[col].astype(str).values\n            == fixture_df[col].astype(str).values)\n        print(\"  %s: %d/%d match (%.1f%%)\" % (\n            col, match.sum(), len(match), 100 * match.mean()))\n\n    summary_df = 
pd.DataFrame(summary_rows)\n    summary_path = os.path.join(args.out_dir, \"summary.csv\")\n    summary_df.to_csv(summary_path, index=False)\n    print(\"\\nSummary written to %s\" % summary_path)\n    print(\"Figures written to %s\" % args.out_dir)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "scripts/validate_allele_sequences.py",
    "content": "#!/usr/bin/env python\n\"\"\"\nValidate that allele name -> pseudosequence mappings are consistent.\n\nChecks:\n  1. The raw allele_sequences.csv from the downloads is loaded identically\n     by both the current code and a from-scratch parse.\n  2. The renormalization step (mhcgnomes) in Class1AffinityPredictor.load()\n     produces a deterministic mapping with no collisions or lost alleles.\n  3. Every allele in the downloaded models' allele_sequences.csv maps to the\n     same pseudosequence as the standalone allele_sequences download.\n  4. Every allele used in the fixture CSV resolves to a sequence.\n\nUsage:\n    python scripts/validate_allele_sequences.py\n\"\"\"\nimport os\nimport sys\n\nimport pandas as pd\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.common import normalize_allele_name\nfrom mhcflurry.downloads import configure, get_path, get_default_class1_models_dir\n\n\ndef load_raw_csv(path):\n    \"\"\"Load allele_sequences.csv exactly as on disk (no renormalization).\"\"\"\n    return pd.read_csv(path, index_col=0).iloc[:, 0].to_dict()\n\n\ndef renormalize(raw_mapping):\n    \"\"\"\n    Apply the same renormalization that Class1AffinityPredictor.load() does.\n\n    Returns (renormalized_dict, skipped_list, collision_list).\n    \"\"\"\n    renormalized = {}\n    skipped = []\n    collisions = []\n    for name, sequence in raw_mapping.items():\n        normalized = normalize_allele_name(name, raise_on_error=False)\n        if normalized is None:\n            skipped.append(name)\n            continue\n        if normalized in renormalized and name != normalized:\n            collisions.append((name, normalized))\n            continue\n        renormalized[normalized] = sequence\n    return renormalized, skipped, collisions\n\n\ndef main():\n    configure()\n    errors = []\n\n    # --- 1. Load the standalone allele_sequences download ---\n    print(\"=\" * 70)\n    print(\"1. Loading standalone allele_sequences download\")\n    print(\"=\" * 70)\n    standalone_csv = get_path(\"allele_sequences\", \"allele_sequences.csv\")\n    standalone_raw = load_raw_csv(standalone_csv)\n    print(\"  Raw entries: %d\" % len(standalone_raw))\n    standalone_norm, standalone_skipped, standalone_collisions = renormalize(\n        standalone_raw)\n    print(\"  After renormalization: %d entries\" % len(standalone_norm))\n    print(\"  Skipped (unparseable): %d\" % len(standalone_skipped))\n    print(\"  Collisions (duplicate after renorm): %d\" % len(\n        standalone_collisions))\n    if standalone_skipped:\n        print(\"  Skipped names (first 20): %s\" % standalone_skipped[:20])\n\n    # --- 2. Load from the predictor (goes through the same renorm logic) ---\n    print()\n    print(\"=\" * 70)\n    print(\"2. Loading allele_to_sequence from Class1AffinityPredictor.load()\")\n    print(\"=\" * 70)\n    models_dir = get_default_class1_models_dir()\n    predictor = Class1AffinityPredictor.load(models_dir)\n    predictor_seq = predictor.allele_to_sequence\n    print(\"  Entries: %d\" % len(predictor_seq))\n\n    # --- 3. Load the raw CSV from the models directory ---\n    print()\n    print(\"=\" * 70)\n    print(\"3. 
Loading raw CSV from models directory\")\n    print(\"=\" * 70)\n    models_csv = os.path.join(models_dir, \"allele_sequences.csv\")\n    models_raw = load_raw_csv(models_csv)\n    print(\"  Raw entries: %d\" % len(models_raw))\n    models_norm, models_skipped, models_collisions = renormalize(models_raw)\n    print(\"  After renormalization: %d entries\" % len(models_norm))\n    print(\"  Skipped: %d\" % len(models_skipped))\n    print(\"  Collisions: %d\" % len(models_collisions))\n\n    # --- 4. Compare predictor mapping vs models dir renormalized ---\n    print()\n    print(\"=\" * 70)\n    print(\"4. Comparing predictor.allele_to_sequence vs models-dir renormalized\")\n    print(\"=\" * 70)\n    pred_keys = set(predictor_seq.keys())\n    model_keys = set(models_norm.keys())\n    only_pred = pred_keys - model_keys\n    only_model = model_keys - pred_keys\n    common = pred_keys & model_keys\n    print(\"  In predictor only: %d\" % len(only_pred))\n    print(\"  In models-dir renorm only: %d\" % len(only_model))\n    print(\"  In common: %d\" % len(common))\n    mismatched = []\n    for k in sorted(common):\n        if predictor_seq[k] != models_norm[k]:\n            mismatched.append(k)\n    if mismatched:\n        print(\"  MISMATCH in %d alleles:\" % len(mismatched))\n        for k in mismatched[:20]:\n            print(\"    %s:\" % k)\n            print(\"      predictor: %s\" % predictor_seq[k])\n            print(\"      models:    %s\" % models_norm[k])\n        errors.append(\n            \"Predictor vs models-dir mismatch: %d alleles\" % len(mismatched))\n    else:\n        print(\"  All %d common alleles have identical sequences.\" % len(common))\n\n    # --- 5. Compare models-dir vs standalone download (informational) ---\n    # The models ship with their own allele_sequences.csv which may use a\n    # different pseudosequence definition than the standalone download.\n    # Differences here are expected and not an error.\n    print()\n    print(\"=\" * 70)\n    print(\"5. Comparing models-dir vs standalone allele_sequences download\")\n    print(\"   (informational — different pseudosequence versions are expected)\")\n    print(\"=\" * 70)\n    model_keys_set = set(models_norm.keys())\n    standalone_keys_set = set(standalone_norm.keys())\n    only_models = model_keys_set - standalone_keys_set\n    only_standalone = standalone_keys_set - model_keys_set\n    common2 = model_keys_set & standalone_keys_set\n    print(\"  In models only: %d\" % len(only_models))\n    print(\"  In standalone only: %d\" % len(only_standalone))\n    print(\"  In common: %d\" % len(common2))\n    mismatched2 = []\n    for k in sorted(common2):\n        if models_norm[k] != standalone_norm[k]:\n            mismatched2.append(k)\n    if mismatched2:\n        same_len = all(\n            len(models_norm[k]) == len(standalone_norm[k])\n            for k in mismatched2)\n        print(\"  Different sequences: %d / %d (same length: %s)\" % (\n            len(mismatched2), len(common2), same_len))\n        print(\"  (This is expected if pseudosequence positions differ.)\")\n    else:\n        print(\"  All %d common alleles have identical sequences.\" % len(\n            common2))\n\n    # --- 6. Validate fixture alleles ---\n    print()\n    print(\"=\" * 70)\n    print(\"6. 
Validating fixture alleles resolve to sequences\")\n    print(\"=\" * 70)\n    fixture_csv = os.path.join(\n        os.path.dirname(__file__), os.pardir, \"test\", \"data\",\n        \"master_released_class1_presentation_highscore_rows.csv.gz\",\n    )\n    if os.path.exists(fixture_csv):\n        fixture_df = pd.read_csv(fixture_csv, keep_default_na=False)\n        fixture_alleles = fixture_df[\"allele\"].unique()\n        print(\"  Fixture alleles: %d\" % len(fixture_alleles))\n        missing = []\n        for allele in sorted(fixture_alleles):\n            normalized = normalize_allele_name(allele, raise_on_error=False)\n            if normalized is None:\n                missing.append((allele, \"failed to normalize\"))\n            elif normalized not in predictor_seq:\n                missing.append((allele, \"normalized to '%s' but not in predictor\" % normalized))\n        if missing:\n            print(\"  MISSING %d fixture alleles:\" % len(missing))\n            for allele, reason in missing:\n                print(\"    %s: %s\" % (allele, reason))\n            errors.append(\"Missing fixture alleles: %d\" % len(missing))\n        else:\n            print(\"  All %d fixture alleles resolve correctly.\" % len(\n                fixture_alleles))\n    else:\n        print(\"  Fixture CSV not found, skipping.\")\n\n    # --- 7. Verify normalize_allele_name is deterministic ---\n    print()\n    print(\"=\" * 70)\n    print(\"7. Verifying normalize_allele_name determinism (sample of 1000)\")\n    print(\"=\" * 70)\n    sample_alleles = sorted(predictor_seq.keys())[:1000]\n    nondeterministic = []\n    for allele in sample_alleles:\n        n1 = normalize_allele_name(allele, raise_on_error=False)\n        n2 = normalize_allele_name(allele, raise_on_error=False)\n        if n1 != n2:\n            nondeterministic.append((allele, n1, n2))\n    if nondeterministic:\n        print(\"  NON-DETERMINISTIC: %d alleles\" % len(nondeterministic))\n        errors.append(\"Non-deterministic normalization: %d\" % len(\n            nondeterministic))\n    else:\n        print(\"  All 1000 sampled alleles normalize deterministically.\")\n\n    # --- 8. Check idempotency: normalize(normalize(x)) == normalize(x) ---\n    print()\n    print(\"=\" * 70)\n    print(\"8. Checking normalize idempotency (sample of 1000)\")\n    print(\"=\" * 70)\n    non_idempotent = []\n    for allele in sample_alleles:\n        n1 = normalize_allele_name(allele, raise_on_error=False)\n        if n1 is None:\n            continue\n        n2 = normalize_allele_name(n1, raise_on_error=False)\n        if n1 != n2:\n            non_idempotent.append((allele, n1, n2))\n    if non_idempotent:\n        print(\"  NON-IDEMPOTENT: %d alleles\" % len(non_idempotent))\n        for allele, n1, n2 in non_idempotent[:10]:\n            print(\"    %s -> %s -> %s\" % (allele, n1, n2))\n        errors.append(\"Non-idempotent normalization: %d\" % len(\n            non_idempotent))\n    else:\n        print(\"  All sampled alleles normalize idempotently.\")\n\n    # --- Summary ---\n    print()\n    print(\"=\" * 70)\n    if errors:\n        print(\"ERRORS FOUND:\")\n        for e in errors:\n            print(\"  - %s\" % e)\n        sys.exit(1)\n    else:\n        print(\"ALL CHECKS PASSED\")\n        sys.exit(0)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "selected-peptides.csv",
    "content": ",gene,rnav8_len,k,start_1based,end_1based,peptide,is_mut,is_ref,is_linker,is_leader,is_mitd,is_chimeric,is_CTA,mhcflurry_peptide,mhcflurry_peptide_num,mhcflurry_sample_name,mhcflurry_affinity,mhcflurry_best_allele,mhcflurry_processing_score,mhcflurry_presentation_score,mhcflurry_presentation_percentile,netmhcpan_best_allele_by_pr,netmhcpan_pr,netmhcpan_aff\n18,ABI3BP,25,9,1,9,IPPKTSRTL,False,True,False,False,False,False,False,IPPKTSRTL,18,sample1,76.63097212525604,C*01:02,0.4595130290836096,0.8500543950692536,0.1993750000000034,HLA-C*01:02,0.158,445.76\n25,ABI3BP,25,9,8,16,TLEQPMATL,True,False,False,False,False,False,False,TLEQPMATL,25,sample1,59.405680603234636,C*01:02,0.0902106249704957,0.6627795863965235,0.5019293478260352,HLA-C*01:02,0.864,3313.63\n33,ABI3BP,25,9,16,24,LAPSETPFV,False,True,False,False,False,False,False,LAPSETPFV,33,sample1,87.78069923706266,C*01:02,0.06101071718148887,0.548132005488575,0.7210054347824979,HLA-C*01:02,0.124,346.21\n41,ABI3BP,25,10,7,16,RTLEQPMATL,True,False,False,False,False,False,False,RTLEQPMATL,41,sample1,342.70824393676776,C*01:02,0.6008451171219349,0.6857038580738146,0.46138586956517713,HLA-C*01:02,1.876,7023.27\n82,BMP1,25,8,17,24,YRSRLCWY,False,True,False,False,False,False,False,YRSRLCWY,82,sample1,2061.1929952131527,C*07:01,0.009316287454566918,0.04497807155051911,5.671874999999957,HLA-C*07:01,0.499,1556.12\n84,BMP1,25,9,1,9,VTPGEKIIL,False,True,False,False,False,False,False,VTPGEKIIL,84,sample1,32.455146232051455,C*01:02,0.07126966048963368,0.7677951702843354,0.3283152173912782,HLA-C*01:02,0.107,283.16\n89,BMP1,25,9,6,14,KIILNFTTL,True,False,False,False,False,False,False,KIILNFTTL,89,sample1,62.82330105310425,C*01:02,0.24402968399226665,0.762349438982255,0.3385597826086837,HLA-C*01:02,0.437,1648.2\n108,BMP1,25,10,8,17,ILNFTTLDLY,True,False,False,False,False,False,False,ILNFTTLDLY,108,sample1,397.25588353695406,A*01:01,0.0590585444442695,0.2175041276152879,1.731739130434704,HLA-A*01:01,0.139,185.87\n138,CDC40,25,8,7,14,LSPNGKSL,True,False,False,False,False,False,False,LSPNGKSL,138,sample1,37.06839713891176,C*01:02,0.3020476885139942,0.867992830469506,0.17184782608696025,HLA-C*01:02,0.106,281.61\n155,CDC40,25,9,6,14,TLSPNGKSL,True,False,False,False,False,False,False,TLSPNGKSL,155,sample1,30.381092671526748,C*01:02,0.118210882646963,0.8062982348254204,0.2683967391303952,HLA-C*01:02,0.31,1129.8\n223,MAP2,46,8,26,33,VVPFTKAL,True,False,False,False,False,False,False,VVPFTKAL,223,sample1,106.3602561235778,C*01:02,0.17740425933152437,0.6031125399380882,0.6095380434781958,HLA-C*01:02,0.102,267.69\n224,MAP2,46,8,27,34,VPFTKALM,True,False,False,False,False,False,False,VPFTKALM,224,sample1,152.8754085041204,B*08:01,0.42134576104581356,0.7169026364612648,0.4095652173913038,HLA-B*08:01,2.026,1969.94\n255,MAP2,46,9,19,27,VRIYQGRVV,True,False,False,False,False,False,False,VRIYQGRVV,255,sample1,142.73052529543338,C*07:01,0.4826787058264017,0.7708029911722674,0.32350543478260363,HLA-C*07:01,0.18,558.28\n257,MAP2,46,9,21,29,IYQGRVVPF,True,False,False,False,False,False,False,IYQGRVVPF,257,sample1,137.68788910889734,C*07:01,0.7557790763676167,0.9015044767998036,0.11961956521740547,HLA-C*07:01,0.856,2492.27\n260,MAP2,46,9,24,32,GRVVPFTKA,True,False,False,False,False,False,False,GRVVPFTKA,260,sample1,131.33533925153966,B*27:05,0.46765692345798016,0.7756550111986702,0.3167663043477944,HLA-B*27:05,2.503,1572.45\n261,MAP2,46,9,25,33,RVVPFTKAL,True,False,False,False,False,False,False,RVVPFTKAL,261,sample1,53.399215987370674,C*01:02,0.5948334485292435,0.9285
526860313993,0.07940217391303861,HLA-C*01:02,0.272,955.25\n262,MAP2,46,9,26,34,VVPFTKALM,True,False,False,False,False,False,False,VVPFTKALM,262,sample1,70.3866518862408,C*01:02,0.08792760979849845,0.6231343104279621,0.5722554347825621,HLA-C*01:02,0.087,206.1\n263,MAP2,46,9,27,35,VPFTKALMI,True,False,False,False,False,False,False,VPFTKALMI,263,sample1,89.80587587212494,B*08:01,0.7832967713475227,0.9385730624865474,0.0643206521739188,HLA-B*08:01,1.4,1323.8\n268,MAP2,46,9,32,40,ALMIKFEEI,True,False,False,False,False,False,False,ALMIKFEEI,268,sample1,158.95257022421424,B*08:01,0.661398708820343,0.8507660148330426,0.1993750000000034,HLA-B*08:01,0.262,181.28\n294,MAP2,46,10,20,29,RIYQGRVVPF,True,False,False,False,False,False,False,RIYQGRVVPF,294,sample1,407.29741110701156,C*01:02,0.7349352389574051,0.7477825234902032,0.36095108695650424,HLA-B*08:01,0.999,879.81\n355,PIP5K1A,28,8,8,15,APVATPAL,True,False,False,False,False,False,False,APVATPAL,355,sample1,1177.232444282441,C*01:02,0.305454570800066,0.18790045504598077,1.9201630434781833,HLA-C*01:02,0.439,1656.38\n362,PIP5K1A,28,8,15,22,LLTSHRSL,True,False,False,False,False,False,False,LLTSHRSL,362,sample1,1231.6157879369582,B*08:01,0.0777759711490944,0.09004104244184576,3.1588586956520857,HLA-B*08:01,0.377,275.73\n375,PIP5K1A,28,9,7,15,AAPVATPAL,True,False,False,False,False,False,False,AAPVATPAL,375,sample1,22.01541646935366,C*01:02,0.7341283150017262,0.9805101777868475,0.007255434782621251,HLA-C*01:02,0.007,11.59\n394,PIP5K1A,28,10,6,15,RAAPVATPAL,True,False,False,False,False,False,False,RAAPVATPAL,394,sample1,49.96962487883811,C*01:02,0.24621670693159103,0.8014998310015791,0.27603260869564394,HLA-C*01:02,0.069,151.89\n395,PIP5K1A,28,10,7,16,AAPVATPALL,True,False,False,False,False,False,False,AAPVATPALL,395,sample1,33.21428884149478,C*01:02,0.397992642596364,0.9112854698647911,0.10394021739132597,HLA-C*01:02,0.068,149.04\n412,PIP5K1A,28,11,5,15,RRAAPVATPAL,True,False,False,False,False,False,False,RRAAPVATPAL,412,sample1,59.01946907449498,C*07:01,0.1422674390487373,0.7039596136958644,0.4310869565217246,HLA-C*01:02,0.212,689.74\n454,SMC5,25,9,11,19,RQKRIGNTR,True,False,False,False,False,False,False,RQKRIGNTR,454,sample1,121.07188531463042,B*27:05,0.15974517143331468,0.5572798494670976,0.7013858695651152,HLA-B*27:05,1.4,599.19\n456,SMC5,25,9,13,21,KRIGNTRKM,True,False,False,False,False,False,False,KRIGNTRKM,456,sample1,36.80007240208899,C*07:01,0.06430770861334167,0.7406268657670435,0.37358695652172,HLA-C*07:01,0.066,195.03\n516,SPG11,25,9,7,15,LIDDQDVSI,True,False,False,False,False,False,False,LIDDQDVSI,516,sample1,125.86779456002365,C*01:02,0.3316497001796961,0.6901060990474238,0.4548369565217172,HLA-A*01:01,2.355,7932.71\n526,SPG11,25,9,17,25,LLSLRILSF,False,True,False,False,False,False,False,LLSLRILSF,526,sample1,77.4165613453536,B*08:01,0.4522215351462364,0.8454462000328207,0.20706521739127481,HLA-B*08:01,0.09,58.22\n532,SPG11,25,10,6,15,KLIDDQDVSI,True,False,False,False,False,False,False,KLIDDQDVSI,532,sample1,239.8657978219368,C*01:02,0.7258778959512711,0.8276511400361685,0.23519021739130608,HLA-C*01:02,9.911,22746.92\n542,SPG11,25,10,16,25,SLLSLRILSF,False,True,False,False,False,False,False,SLLSLRILSF,542,sample1,627.5234729046349,B*27:05,0.31914879381656647,0.309140415987372,1.3343749999999233,HLA-B*08:01,0.333,237.23\n660,TECPR1,83,9,27,35,EVPGPSWSL,True,False,False,False,False,False,False,EVPGPSWSL,660,sample1,59.34389753447961,C*01:02,0.46677792351692915,0.8817450760131815,0.1514402173913254,HLA-C*01:02,0.268,935.86\n678,TECPR1,83,9,45,53
,SRWESAWSG,True,False,False,False,False,False,False,SRWESAWSG,678,sample1,340.03976756247806,B*27:05,0.1498977376613766,0.3083579983400033,1.3343749999999233,HLA-B*27:05,0.443,136.67\n703,TECPR1,83,9,70,78,TIPAAPVGL,True,False,False,False,False,False,False,TIPAAPVGL,703,sample1,26.28428712804486,C*01:02,0.3070325553417206,0.9033408626298494,0.11682065217391369,HLA-C*01:02,0.041,83.92\n706,TECPR1,83,9,73,81,AAPVGLRWL,True,False,False,False,False,False,False,AAPVGLRWL,706,sample1,38.28177943121105,C*01:02,0.15587826329283416,0.7916457568902094,0.2912499999999625,HLA-C*01:02,0.081,193.21\n734,TECPR1,83,10,26,35,KEVPGPSWSL,True,False,False,False,False,False,False,KEVPGPSWSL,734,sample1,2248.742127250853,B*27:05,0.4546149540692568,0.17298381313137473,2.027690217391225,HLA-C*01:02,0.373,1381.6\n753,TECPR1,83,10,45,54,SRWESAWSGL,True,False,False,False,False,False,False,SRWESAWSGL,753,sample1,63.243425369373774,C*07:01,0.19101055013015866,0.7254265272158179,0.3973641304347808,HLA-B*27:05,0.01,11.57\n777,TECPR1,83,10,69,78,LTIPAAPVGL,True,False,False,False,False,False,False,LTIPAAPVGL,777,sample1,111.42488810790788,C*01:02,0.2800327483564615,0.6762032728784696,0.4788586956521357,HLA-C*01:02,0.367,1356.26\n868,VPS72,25,8,13,20,RALLPLEL,True,False,False,False,False,False,False,RALLPLEL,868,sample1,170.84081118912832,C*01:02,0.38518839702010155,0.6666992275701744,0.49510869565216353,HLA-C*07:01,4.065,9095.19\n885,VPS72,25,9,12,20,ERALLPLEL,True,False,False,False,False,False,False,ERALLPLEL,885,sample1,47.50778208829167,C*07:01,0.2833710527047515,0.8286693910953119,0.23345108695652073,HLA-C*07:01,0.188,590.32\n933,WWC1,24,8,12,19,MGHLQHEL,True,False,False,False,False,False,False,MGHLQHEL,933,sample1,115.99551643229341,C*01:02,0.3307158090174198,0.7061279167746464,0.42766304347823336,HLA-B*08:01,0.717,594.01\n951,WWC1,24,9,13,21,GHLQHELQF,True,False,False,False,False,False,False,GHLQHELQF,951,sample1,113.42506890264815,C*07:01,0.259261479601264,0.6560192228648354,0.5143749999999585,HLA-C*07:01,4.486,9796.8\n1002,MAGEA10p1,25,9,1,9,SSPSVVASL,False,True,False,False,False,False,True,SSPSVVASL,1002,sample1,21.110055920581388,C*01:02,0.16465385956689715,0.8748015269207916,0.16249999999999432,HLA-C*01:02,0.008,12.33\n1019,MAGEA10p1,25,10,1,10,SSPSVVASLP,False,True,False,False,False,False,True,SSPSVVASLP,1019,sample1,13090.572570329334,C*01:02,0.008235680041252635,0.007728873015256679,31.643179347826077,HLA-C*01:02,0.494,1899.73\n1035,MAGEA10p1,25,11,1,11,SSPSVVASLPL,False,True,False,False,False,False,True,SSPSVVASLPL,1035,sample1,418.57842279669956,C*01:02,0.02461180041427724,0.18955046992904015,1.905543478260796,HLA-C*01:02,0.121,334.2\n1051,MAGEA10p2,25,8,2,9,VTDLVQFL,False,True,False,False,False,False,True,VTDLVQFL,1051,sample1,218.4621970498831,A*01:01,0.7729858718812466,0.8613483409926228,0.1821467391304452,HLA-A*01:01,0.941,2721.01\n1057,MAGEA10p2,25,8,8,15,FLLFKYQM,False,True,False,False,False,False,True,FLLFKYQM,1057,sample1,410.1786929319161,B*08:01,0.13465335359796882,0.2603978337209331,1.5202989130434048,HLA-B*08:01,0.128,87.63\n1068,MAGEA10p2,25,9,1,9,KVTDLVQFL,False,True,False,False,False,False,True,KVTDLVQFL,1068,sample1,81.34874039241913,C*01:02,0.8616118803620338,0.9568792538563173,0.038315217391314604,HLA-C*07:01,0.45,1412.77\n1069,MAGEA10p2,25,9,2,10,VTDLVQFLL,False,True,False,False,False,False,True,VTDLVQFLL,1069,sample1,108.78437810075624,A*01:01,0.4036145284771919,0.7679767844030344,0.3283152173912782,HLA-A*01:01,0.126,168.04\n1080,MAGEA10p2,25,9,13,21,YQMKEPITK,False,True,False,False,Fals
e,False,True,YQMKEPITK,1080,sample1,86.88238876593987,B*27:05,0.5966939721256495,0.8907509152335247,0.13692934782609711,HLA-B*27:05,2.747,1821.31\n1084,MAGEA10p2,25,9,17,25,EPITKAEIL,False,True,False,False,False,False,True,EPITKAEIL,1084,sample1,88.03372181871534,B*08:01,0.29368303483352065,0.7337152898998892,0.38508152173912436,HLA-B*08:01,1.689,1621.96\n1086,MAGEA10p2,25,10,2,11,VTDLVQFLLF,False,True,False,False,False,False,True,VTDLVQFLLF,1086,sample1,105.83716241662344,A*01:01,0.5764349922537804,0.8623637058673023,0.1807065217391255,HLA-A*01:01,0.08,91.16\n1205,MAGEC1p1,25,9,6,14,MPTAGMPSL,False,True,False,False,False,False,True,MPTAGMPSL,1205,sample1,77.11967414544304,B*08:01,0.8355902284383774,0.9551892991765902,0.03964673913044692,HLA-C*01:02,2.124,7816.58\n1221,MAGEC1p1,25,10,5,14,DMPTAGMPSL,False,True,False,False,False,False,True,DMPTAGMPSL,1221,sample1,119.73642462637594,C*01:02,0.30586328147910535,0.6808950908421842,0.4698369565216751,HLA-C*01:02,0.584,2261.99\n1282,MAGEC1p2,25,9,17,25,SSPEGKDSL,False,True,False,False,False,False,True,SSPEGKDSL,1282,sample1,26.62843195242034,C*01:02,0.23959087580442429,0.8790703829859254,0.15467391304348155,HLA-C*01:02,0.036,74.21\n1314,MAGEC1p3,25,8,1,8,FAQSPLQI,False,True,False,False,False,False,True,FAQSPLQI,1314,sample1,162.1616093286923,C*01:02,0.4001904344186187,0.6893362454365073,0.4563315217391022,HLA-C*01:02,4.517,14042.74\n1324,MAGEC1p3,25,8,11,18,SPSSSSTL,False,True,False,False,False,False,True,SPSSSSTL,1324,sample1,2396.182609115678,C*01:02,0.10404993500560522,0.05382663961091608,4.807092391304323,HLA-C*01:02,0.264,916.83\n1341,MAGEC1p3,25,9,10,18,VSPSSSSTL,False,True,False,False,False,False,True,VSPSSSSTL,1341,sample1,26.7171004845041,C*01:02,0.2750385506078601,0.8914700477888546,0.13535326086957866,HLA-C*01:02,0.007,9.22\n1344,MAGEC1p3,25,9,13,21,SSSSTLLSL,False,True,False,False,False,False,True,SSSSTLLSL,1344,sample1,43.6080517726507,C*01:02,0.05253881262615323,0.6990356519876642,0.4397010869565037,HLA-C*01:02,0.352,1290.55\n1357,MAGEC1p3,25,10,9,18,PVSPSSSSTL,False,True,False,False,False,False,True,PVSPSSSSTL,1357,sample1,270.5527050478048,C*01:02,0.005632098502246663,0.25045437521980246,1.5659510869564457,HLA-C*01:02,0.189,581.05\n1358,MAGEC1p3,25,10,10,19,VSPSSSSTLL,False,True,False,False,False,False,True,VSPSSSSTLL,1358,sample1,46.67597607663383,C*01:02,0.06457851023878902,0.6940808873475162,0.4474999999999767,HLA-C*01:02,0.052,107.3\n1372,MAGEC1p3,25,11,8,18,IPVSPSSSSTL,False,True,False,False,False,False,True,IPVSPSSSSTL,1372,sample1,224.04407533977502,B*08:01,0.14942678052466363,0.4003023274794277,1.0576086956520356,HLA-C*01:02,0.102,266.27\n1395,PAGE2,25,8,16,23,TFDLTKVL,False,True,False,False,False,False,True,TFDLTKVL,1395,sample1,192.2698234992499,C*01:02,0.4834165219217539,0.7162742705527396,0.4113315217391147,HLA-C*01:02,6.084,17134.66\n1406,PAGE2,25,9,9,17,VREGIMPTF,False,True,False,False,False,False,True,VREGIMPTF,1406,sample1,52.82333005903476,C*07:01,0.09807718684896827,0.6937290619275586,0.4474999999999767,HLA-C*07:01,0.277,878.36\n1427,PAGE2,25,10,13,22,IMPTFDLTKV,False,True,False,False,False,False,True,IMPTFDLTKV,1427,sample1,482.5819436701458,C*01:02,0.3368768533691764,0.3807632213974432,1.1085597826085376,HLA-C*01:02,0.407,1532.39\n1443,PAGE2,25,11,13,23,IMPTFDLTKVL,False,True,False,False,False,False,True,IMPTFDLTKVL,1443,sample1,1917.037898539047,C*01:02,0.15068084315862507,0.07691883261218202,3.5626902173912214,HLA-C*01:02,0.196,620.72\n1465,PAGE5,24,9,3,11,VREGTLPTF,False,True,False,False,False,False,True,VRE
GTLPTF,1465,sample1,52.2543355162559,C*07:01,0.05408053379505873,0.6620549703835366,0.5038043478260334,HLA-C*07:01,0.411,1309.76\n1485,PAGE5,24,10,7,16,TLPTFDPTKV,False,True,False,False,False,False,True,TLPTFDPTKV,1485,sample1,220.1699502138865,C*01:02,0.4927303232252598,0.6958028106023945,0.44383152173911355,HLA-C*01:02,1.026,3936.55\n1500,PAGE5,24,11,7,17,TLPTFDPTKVL,False,True,False,False,False,False,True,TLPTFDPTKVL,1500,sample1,213.15764776188504,C*01:02,0.19725923147052526,0.45348878030270307,0.9252717391303236,HLA-C*01:02,0.22,721.1\n1530,XAGE1Ap1,25,9,5,13,QLKVGILHL,False,True,False,False,False,False,True,QLKVGILHL,1530,sample1,77.25077298009815,B*08:01,0.17316756071522832,0.671301090626941,0.4883152173912748,HLA-B*08:01,0.792,665.99\n1541,XAGE1Ap1,25,9,16,24,RQKKIRIQL,False,True,False,False,False,False,True,RQKKIRIQL,1541,sample1,49.26971587709329,C*07:01,0.6224369034171104,0.9393752719126912,0.06312499999999943,HLA-B*08:01,1.2,1118.37\n1558,XAGE1Ap1,25,10,16,25,RQKKIRIQLR,False,True,False,False,False,False,True,RQKKIRIQLR,1558,sample1,211.91169822956056,B*27:05,0.6150255762040615,0.7853669958109232,0.3009239130434338,HLA-B*27:05,1.751,863.35\n1592,XAGE1Ap2,25,9,1,9,ISQTPGINL,False,True,False,False,False,False,True,ISQTPGINL,1592,sample1,48.26454439433474,C*01:02,0.26859587989747524,0.8188534498605134,0.24815217391304145,HLA-C*01:02,0.613,2378.74\n1594,XAGE1Ap2,25,9,3,11,QTPGINLDL,False,True,False,False,False,False,True,QTPGINLDL,1594,sample1,72.94023535305432,C*01:02,0.19312988221645355,0.698565580401679,0.4409239130434628,HLA-C*01:02,0.201,648.84\n"
  },
  {
    "path": "setup.py",
    "content": "# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport logging\nimport re\n\nfrom setuptools import setup\n\n\nreadme_dir = os.path.dirname(__file__)\nreadme_filename = os.path.join(readme_dir, \"README.md\")\n\ntry:\n    with open(readme_filename, \"r\") as f:\n        readme = f.read()\nexcept:\n    logging.warning(\"Failed to load %s\" % readme_filename)\n    readme = \"\"\n\n\nwith open(\"mhcflurry/version.py\", \"r\") as f:\n    version = re.search(\n        r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]', f.read(), re.MULTILINE\n    ).group(1)\n\nif __name__ == \"__main__\":\n    required_packages = [\n        \"pandas>=2.0\",\n        \"appdirs\",\n        \"scikit-learn\",\n        \"mhcgnomes>=3.0.1\",\n        \"numpy>=1.22.4\",\n        \"pyyaml\",\n        \"tqdm\",\n        \"torch>=2.0.0\",\n    ]\n\n    setup(\n        name=\"mhcflurry\",\n        version=version,\n        description=\"MHC Binding Predictor\",\n        author=\"Tim O'Donnell and Alex Rubinsteyn\",\n        author_email=\"timodonnell@gmail.com\",\n        url=\"https://github.com/openvax/mhcflurry\",\n        license=\"Apache-2.0\",\n        entry_points={\n            \"console_scripts\": [\n                \"mhcflurry-downloads = mhcflurry.downloads_command:run\",\n                \"mhcflurry-predict = mhcflurry.predict_command:run\",\n                \"mhcflurry-predict-scan = mhcflurry.predict_scan_command:run\",\n                \"mhcflurry-class1-train-allele-specific-models = \"\n                \"mhcflurry.train_allele_specific_models_command:run\",\n                \"mhcflurry-class1-train-pan-allele-models = \"\n                \"mhcflurry.train_pan_allele_models_command:run\",\n                \"mhcflurry-class1-train-processing-models = \"\n                \"mhcflurry.train_processing_models_command:run\",\n                \"mhcflurry-class1-select-allele-specific-models = \"\n                \"mhcflurry.select_allele_specific_models_command:run\",\n                \"mhcflurry-class1-select-pan-allele-models = \"\n                \"mhcflurry.select_pan_allele_models_command:run\",\n                \"mhcflurry-class1-select-processing-models = \"\n                \"mhcflurry.select_processing_models_command:run\",\n                \"mhcflurry-calibrate-percentile-ranks = \"\n                \"mhcflurry.calibrate_percentile_ranks_command:run\",\n                \"mhcflurry-class1-train-presentation-models = \"\n                \"mhcflurry.train_presentation_models_command:run\",\n                \"_mhcflurry-cluster-worker-entry-point = \"\n                \"mhcflurry.cluster_parallelism:worker_entry_point\",\n            ]\n        },\n        python_requires=\">=3.10\",\n        classifiers=[\n            \"Development Status :: 5 - Production/Stable\",\n            \"Environment :: Console\",\n            \"Operating System :: OS Independent\",\n            \"Intended Audience :: Science/Research\",\n            \"Programming Language :: Python :: 3\",\n            \"Programming Language :: Python :: 3.10\",\n            
\"Programming Language :: Python :: 3.11\",\n            \"Programming Language :: Python :: 3.12\",\n            \"Topic :: Scientific/Engineering :: Bio-Informatics\",\n        ],\n        package_data={\n            \"mhcflurry\": [\"downloads.yml\"],\n        },\n        install_requires=required_packages,\n        long_description=readme,\n        long_description_content_type=\"text/markdown\",\n        packages=[\n            \"mhcflurry\",\n        ],\n    )\n"
  },
  {
    "path": "setup_local_env.sh",
    "content": "#!/bin/bash\nset -e\n\nVENV_DIR=\".venv\"\n\n# Create virtual environment if it doesn't exist\nif [ ! -d \"$VENV_DIR\" ]; then\n    echo \"Creating virtual environment...\"\n    python -m venv \"$VENV_DIR\"\nelse\n    echo \"Virtual environment already exists.\"\nfi\n\n# Activate and install\necho \"Activating virtual environment and installing mhcflurry...\"\nsource \"$VENV_DIR/bin/activate\"\npip install -e .\n\necho \"\"\necho \"Done! To use this environment, run:\"\necho \"  source .venv/bin/activate\"\n"
  },
  {
    "path": "test/__init__.py",
    "content": "'''\nUtility functions for tests.\n'''\n\nimport os\nimport time\n\n\ndef data_path(name):\n    '''\n    Return the absolute path to a file in the test/data directory.\n    The name specified should be relative to test/data.\n    '''\n    return os.path.join(os.path.dirname(__file__), \"data\", name)\n\n\ndef initialize():\n    '''\n    Initialize logging and PyTorch, numpy, and python random seeds.\n    '''\n    import logging\n    logging.getLogger(\"matplotlib\").disabled = True\n\n    import numpy\n    import random\n    import torch\n\n    seed = int(os.environ.get(\"MHCFLURRY_TEST_SEED\", 1))\n    if seed == 0:\n        # Enable nondeterminism\n        seed = int(time.time())\n    print(\"Using random seed\", seed)\n\n    # Set seeds for reproducibility\n    numpy.random.seed(seed)\n    random.seed(seed)\n    torch.manual_seed(seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(seed)\n\n    # Enable deterministic operations where possible\n    torch.use_deterministic_algorithms(False)  # Some ops don't have deterministic impl\n"
  },
  {
    "path": "test/conftest.py",
    "content": "\"\"\"\nPytest configuration and session-wide initialization.\n\"\"\"\nfrom . import initialize\n\n# Ensure deterministic test setup without per-file initialize() calls.\ninitialize()\n\ndef pytest_configure(config):\n    # Register custom marks used across tests.\n    config.addinivalue_line(\"markers\", \"slow: marks tests as slow\")\n\n    # PyTorch warns that padding='same' with even kernels may allocate a\n    # temporary padded copy. This is expected for our processing defaults.\n    config.addinivalue_line(\n        \"filterwarnings\",\n        (\n            \"ignore:Using padding='same' with even kernel lengths and odd \"\n            \"dilation may require a zero-padded copy of the input be created.*:\"\n            \"UserWarning:torch\\\\.nn\\\\.modules\\\\.conv\"\n        ),\n    )\n"
  },
  {
    "path": "test/data/data_10mer.csv",
    "content": "peptide,mhc,meas\nAAAAAAAAAA,HLA-A0201,400"
  },
  {
    "path": "test/data/data_8mer.csv",
    "content": "peptide,mhc,meas\nAAAAAAAA,HLA-A0201,400"
  },
  {
    "path": "test/data/data_9mer.csv",
    "content": "peptide,mhc,meas\nAAAAAAAAA,HLA-A0201,400"
  },
  {
    "path": "test/data/example.fasta",
    "content": ">QHN73810.1 surface glycoprotein [Severe acute respiratory syndrome coronavirus 2] prefix\nMFVFLVLLPLVSSQCVNLTTRTQLPPAYTNSFTRGVYYPDKVfrssVLHSTQDLFLPFFSNVTWFHAIHV\nSGTNGTKRFDNPVLPFNDGVYFASTEKSNIIRGWIFGTTLDSKTQSLLIVNNATNVVIKVCEFQFCNDPF\nLGVYYHKNNKSWMESEFRVYSSANNCTFEYVSQPFLMDLEGKQGNFKNLREFVFKNIDGYFKIYSKHTPI\n>protein1\nMDSKGSSQKGSRLLLLLVVSNLLLCQGVVSTPVCPNGPGNCQV\nEMFNEFDKRYAQGKGFITMALNSCHTSSLPTPEDKEQAQQTHH\n>protein2\nVTEVRGMKGAPDAILSRAIEIEEENKRLLEGMEMIFGQVIPGA\nARYSAFYNLLHCLRRDSSKIDTYLKLLNCRIIYNNNC\n"
  },
  {
    "path": "test/data/hpv_predictions.csv",
    "content": "allele,peptide,Length,Affinity (uM),Binding Capacity,Status,Security,affinity,netmhcpan3,netmhcpan4,netmhc,MHCflurry 1.2.0,MHCflurry (train MS),MHCflurry (no MS)\nHLA-A*02:01,LMGTLGIVCPI,11,0.29,strong,novel,CONFIDENTIAL,290.0,261.8,214.5,248.47,167.22380960425482,74.84751152620106,301.93403988869073\nHLA-A*02:01,RTLEDLLMGTL,11,1.95,strong,novel,CONFIDENTIAL,1950.0,3836.9,2582.2,3257.01,2830.2157958268767,2188.9538132772736,2132.3345643653206\nHLA-A*02:01,MGTLGIVCPI,10,2.01,strong,novel,CONFIDENTIAL,2010.0,322.0,296.3,393.31,7561.020273281272,7718.081462632219,4225.447860121435\nHLA-A*02:01,YMLDLQPETTD,11,2.83,strong,novel,CONFIDENTIAL,2830.0,11400.9,9155.6,4351.54,6287.6952523646105,9355.44304469742,5658.571030354306\nHLA-A*02:01,FQDPQERPI,9,3.36,strong,novel,CONFIDENTIAL,3360.0,2044.2,2257.7,1166.07,735.5087046488492,1298.7161222243133,1665.7433194119546\nHLA-A*02:01,TTIHEIILECV,11,3.57,strong,novel,CONFIDENTIAL,3570.0,8613.5,4527.0,3024.93,2554.8469340269025,1810.7353167190304,1699.1128317557593\nHLA-A*02:01,KLPDLCTELQT,11,3.9,strong,novel,CONFIDENTIAL,3900.0,11935.0,6485.2,6742.69,1493.95057048418,2041.3286363075747,3488.3542353067187\nHLA-A*02:01,TIHEIILECV,10,6.61,intermediate,novel,CONFIDENTIAL,6610.0,451.8,408.9,259.31,1428.0179037333828,698.7850153544011,1037.8827485898053\nHLA-A*02:01,TIHEIRLECV,10,8.62,intermediate,novel,CONFIDENTIAL,8620.0,2086.4,1587.4,1719.29,2623.970703334057,1942.8046329664567,2620.8693158569736\nHLA-A*02:01,YMLDLQPE,8,9.69,intermediate,novel,CONFIDENTIAL,9690.0,7303.5,8102.5,5042.35,782.0608447431365,981.9365057280755,2292.0327441099435\nHLA-A*02:01,ELQTTIHEI,9,10.61,intermediate,novel,CONFIDENTIAL,10610.0,2957.5,2929.5,1769.68,2295.581304708852,1796.3302915309757,2763.6730228020515\nHLA-A*02:01,EDLLMGTLGIV,11,12.07,intermediate,novel,CONFIDENTIAL,12070.0,1599.5,1510.9,971.14,14678.11653386876,18844.65527176244,11576.74126684321\nHLA-A*02:01,TTIHEIRLECV,11,14.01,intermediate,novel,CONFIDENTIAL,14010.0,17971.1,12699.0,10173.59,5309.987246377647,3922.455924078571,6448.9484310722155\nHLA-A*02:01,FQDPQERPIKL,11,14.03,intermediate,novel,CONFIDENTIAL,14030.0,11214.0,6086.6,8579.48,2232.255948165024,2319.270770821285,5867.236637410394\nHLA-A*02:01,KLPQLCTELQT,11,27.145,weak,novel,CONFIDENTIAL,27145.0,16616.3,8531.1,10551.05,4005.921189159136,5135.113451729935,7277.304901511617\nHLA-A*02:01,RYYCYSVYGT,10,28.2,weak,novel,CONFIDENTIAL,28200.0,13147.0,15049.1,18721.52,9613.502999977934,9152.728921945634,9621.349989269409\nHLA-A*02:01,YRYYCYSV,8,29.43,weak,novel,CONFIDENTIAL,29430.0,22112.3,19446.0,9128.42,906.80060226134,659.7409889715294,2156.7317840689675\nHLA-A*02:01,YRYYCYSL,8,32.78,weak,novel,CONFIDENTIAL,32780.0,23243.3,20212.7,14919.58,2964.261133692841,2320.4297748900585,6774.905546806434\nHLA-A*02:01,TTIHDIILECV,11,35.72063432,weak,novel,CONFIDENTIAL,35720.63432,7569.3,3945.1,3483.89,1682.8116357450924,999.0395725488116,859.2392689717731\nHLA-A*02:01,LLMGTLGIVCP,11,47.53,weak,novel,CONFIDENTIAL,47530.0,3969.1,2993.6,3239.61,4453.276686751863,5139.813945807773,3630.920394105956\nHLA-A*02:01,ILECVYCKQQL,11,48.75666667,weak,novel,CONFIDENTIAL,48756.66667,5305.8,4136.1,8507.65,4865.669632619836,7471.308350302104,6224.036410094804\nHLA-A*02:01,IRTLEDLLMGT,11,54.56,weak,novel,CONFIDENTIAL,54560.0,9448.4,5149.0,8339.86,15749.863216740647,17063.417773345544,12394.628341531134\nHLA-A*02:01,MLDLQPET,8,59.36,weak,novel,CONFIDENTIAL,59360.0,9487.8,11631.9,4072.64,4314.772648482042,4568.488501987155,7890.133873821171\nHLA-A*02:01,KISEYRYYCYS,11,65.6,weak,n
ovel,CONFIDENTIAL,65600.0,21152.4,18205.2,25066.62,7743.224282745778,8249.90329061251,7770.289216385544\nHLA-A*02:01,SEYRYYCYSV,10,76.64,weak,novel,CONFIDENTIAL,76640.0,6925.8,7485.7,6255.27,1083.5856435034402,1243.166012373093,2682.1753853576192\nHLA-A*02:01,SEYRYYCYSL,10,78.22,weak,novel,CONFIDENTIAL,78220.0,12376.5,14030.9,11628.92,3409.681674777167,3911.731710904121,7951.765029734319\nHLA-A*02:01,TIHEIILEC,9,82.01,weak,novel,CONFIDENTIAL,82010.0,3635.3,3470.3,3629.49,7046.813797831271,6971.597573231845,8045.3140793790035\nHLA-A*02:01,DLLMGTLGIVC,11,83.34,weak,novel,CONFIDENTIAL,83340.0,5638.5,10687.9,2747.2,8054.668925826633,11427.961477186245,2380.608862941643\nHLA-A*01:01,CTELQTTIH,9,1.4,strong,novel,CONFIDENTIAL,1400.0,1036.7,2136.9,6705.74,1528.1812706714318,1429.5692489150265,2060.9423819981425\nHLA-A*01:01,DLQPETTDLY,10,1.77,strong,novel,CONFIDENTIAL,1770.0,2245.9,1323.5,3675.65,193.50561967984606,201.231897331688,290.17355954864087\nHLA-A*01:01,AVCDKCLKFY,10,2.7,strong,novel,CONFIDENTIAL,2700.0,1140.4,1547.3,3210.16,6849.596682068725,8034.716592367712,5342.243682887992\nHLA-A*01:01,LKFYSKISEY,10,7.7,intermediate,novel,CONFIDENTIAL,7700.0,14948.7,15773.3,17014.02,12422.761301072042,12400.829967565891,10658.091389039851\nHLA-A*01:01,GTTLEQQY,8,16.2,weak,novel,CONFIDENTIAL,16200.0,15250.7,14780.6,11701.75,1059.4330640275084,1520.6973609096137,5918.9567991423155\nHLA-A*01:01,KISEYRHYCY,10,18.4,weak,novel,CONFIDENTIAL,18400.0,1377.0,1230.1,330.74,1453.3474707081182,1482.838019653631,1479.8110134773426\nHLA-A*01:01,DTPTLHEY,8,32.0,weak,novel,CONFIDENTIAL,32000.0,7096.4,8641.0,6601.07,1203.6621607075274,1486.3283925246994,5269.391140890927\nHLA-A*03:01,LIRCINCQK,9,4.67,strong,novel,CONFIDENTIAL,4670.0,162.3,145.8,375.07,204.0478698313392,435.8311949480205,350.2389716370093\nHLA-A*03:01,GIVCPICSQK,10,6.49,intermediate,novel,CONFIDENTIAL,6490.0,156.6,157.8,343.48,215.34416992422456,230.1221633521149,151.1602721982741\nHLA-A*03:01,AVCDKCLKFY,10,9.29,intermediate,novel,CONFIDENTIAL,9290.0,990.5,1364.5,932.09,2047.2596287287881,2735.4138567447353,2745.2271592296443\nHLA-A*03:01,SLYGTTLEQQY,11,10.06,intermediate,novel,CONFIDENTIAL,10060.0,162.5,131.8,233.79,1272.2137029730231,1360.821450087417,1225.4807167540018\nHLA-A*03:01,KFYSKISEYR,10,11.72,intermediate,novel,CONFIDENTIAL,11720.0,924.2,1046.0,319.36,535.3088903701828,659.2740478699708,599.8433012394488\nHLA-A*03:01,GTTLEQQYNK,10,17.33,weak,novel,CONFIDENTIAL,17330.0,1092.1,804.8,1292.32,1025.619711037487,1096.7490729173253,1019.6007981576853\nHLA-A*03:01,KQRFHNIRGR,10,21.01,weak,novel,CONFIDENTIAL,21010.0,1768.7,1270.2,1602.6,932.8032378723947,854.2980408145897,1234.1362547195588\nHLA-A*03:01,CVYCKQQLLR,10,30.56,weak,novel,CONFIDENTIAL,30560.0,377.7,319.8,112.79,56.32241532369268,52.70881959032983,52.85686131251664\nHLA-A*03:01,AVCDKCLK,8,34.39,weak,novel,CONFIDENTIAL,34390.0,5533.2,7695.2,6961.08,219.5139365424112,772.7494025534432,1790.5815297337801\nHLA-A*03:01,RHYCYSLY,8,58.09,weak,novel,CONFIDENTIAL,58090.0,10536.2,12510.7,10604.15,5011.41873021642,4586.303522172484,5479.046498205236\nHLA-A*11:01,FAFRDLCIVYR,11,2.67,strong,novel,CONFIDENTIAL,2670.0,1597.0,1170.5,6087.15,198.5418558836729,767.7842293451082,759.5269541462752\nHLA-A*11:01,AVCDKCLKFY,10,4.8,strong,novel,CONFIDENTIAL,4800.0,533.8,419.6,219.88,488.06798984272723,1069.0895760249005,1703.2245972516055\nHLA-A*11:01,LIRCINCQK,9,5.21,intermediate,novel,CONFIDENTIAL,5210.0,287.3,338.0,1089.12,398.9452823944749,1091.5759819174523,1239.654411219963\nHLA-A*11:01,TLEQQYNK,8,6.03,int
ermediate,novel,CONFIDENTIAL,6030.0,5308.2,7464.8,3131.9,376.55410528228134,2962.719959672343,4638.566296504541\nHLA-A*11:01,LGIVCPICSQK,11,6.32,intermediate,novel,CONFIDENTIAL,6320.0,1398.0,1627.3,1807.2,1571.9827102006725,3357.029177949049,1283.0588085190461\nHLA-A*11:01,ILECVYCK,8,8.34,intermediate,novel,CONFIDENTIAL,8340.0,2359.2,4510.3,4750.58,162.8377973720471,1098.030205507943,1498.1961045921537\nHLA-A*11:01,AVCDKCLKFYS,11,11.51,intermediate,novel,CONFIDENTIAL,11510.0,11471.3,12747.5,9182.5,760.7260316491818,2889.8862445569057,2580.9365830201405\nHLA-A*11:01,RCMSCCRSSR,10,12.19,intermediate,novel,CONFIDENTIAL,12190.0,1721.1,1956.4,1927.66,10366.764450311626,17378.519334733573,16695.884904110924\nHLA-A*11:01,YAVCDKCLK,9,12.64,intermediate,novel,CONFIDENTIAL,12640.0,979.9,834.3,1462.48,305.35638363147206,1246.2218943284665,1720.2926725701159\nHLA-A*11:01,DLLIRCINCQK,11,14.75,intermediate,novel,CONFIDENTIAL,14750.0,3140.1,5360.6,4278.29,3141.671074869477,6922.462818808072,7522.20865864938\nHLA-A*11:01,YNIVTFCCK,9,15.83,weak,novel,CONFIDENTIAL,15830.0,961.0,780.0,2722.16,1366.3231568518531,1823.5975197172027,1245.35475751254\nHLA-A*11:01,PYAVCDKCLK,10,15.99,weak,novel,CONFIDENTIAL,15990.0,7038.1,7230.3,8296.39,6694.072757669444,7382.799710571277,6964.978182003566\nHLA-A*11:01,NIVTFCCK,8,17.88,weak,novel,CONFIDENTIAL,17880.0,5011.1,7719.3,8169.19,177.44387922524263,1271.0049548564928,1324.9382491538568\nHLA-A*11:01,EVYDFAFR,8,27.75,weak,novel,CONFIDENTIAL,27750.0,4961.4,6460.5,9341.53,146.23879735518875,1179.4278141381164,803.5471316545512\nHLA-A*11:01,AHYNIVTFCCK,11,28.95,weak,novel,CONFIDENTIAL,28950.0,5908.5,3047.8,4515.23,1799.4665054888096,4097.776212326945,2556.585920952502\nHLA-A*11:01,NPYAVCDKCLK,11,31.11,weak,novel,CONFIDENTIAL,31110.0,18020.2,12853.2,19279.42,5561.112420245893,8852.317456986519,8280.409444184594\nHLA-A*11:01,CVYCKQQLLR,10,36.49,weak,novel,CONFIDENTIAL,36490.0,348.8,356.5,506.27,54.98115951529731,125.60064701018942,161.45982239468202\nHLA-A*11:01,YAVCDKCLKFY,11,40.88,weak,novel,CONFIDENTIAL,40880.0,7708.6,10967.8,2634.65,7952.677805963036,12250.765710981861,16787.055726162154\nHLA-A*11:01,YGTTLEQQYNK,11,43.48,weak,novel,CONFIDENTIAL,43480.0,956.5,539.9,962.76,4318.5973365009995,7398.7202017867285,7566.772095061476\nHLA-A*11:01,AFRDLCIVYR,10,47.68,weak,novel,CONFIDENTIAL,47680.0,3801.2,2782.3,6053.46,2622.759087481481,4940.091758791927,3030.899009631936\nHLA-A*11:01,AVCDKCLKF,9,48.82,weak,novel,CONFIDENTIAL,48820.0,12744.2,10508.3,9085.16,3531.4624077727067,6345.713974481844,6720.213609332939\nHLA-A*11:01,KISEYRHYCY,10,48.83,weak,novel,CONFIDENTIAL,48830.0,553.5,715.8,263.87,96.67683826219296,125.95280812849022,97.50901080742322\nHLA-A*11:01,VCDKCLKFYSK,11,58.43,weak,novel,CONFIDENTIAL,58430.0,20498.8,16472.8,16577.36,2927.256576617747,5664.311988434344,4358.709052700608\nHLA-A*11:01,WTGRCMSCCR,10,82.74,weak,novel,CONFIDENTIAL,82740.0,4430.8,5391.6,15228.16,3411.959775768269,5846.378194697021,5182.588436200903\nHLA-A*24:02,VYDFAFRDLCI,11,0.256666667,strong,novel,CONFIDENTIAL,256.6666667,2621.4,2073.9,2480.67,648.37494940904,1484.3033637780072,5029.309818093023\nHLA-A*24:02,PYAVCDKCLKF,11,0.28,strong,novel,CONFIDENTIAL,280.0,3417.5,1843.7,1976.58,83.28087797328301,243.88324986590942,3576.553843681802\nHLA-A*24:02,VYGTTLEQQY,10,0.68,strong,novel,CONFIDENTIAL,680.0,8525.7,8380.9,6614.94,1629.7666773485894,3107.701590567886,4106.562015411532\nHLA-A*24:02,TFCCKCDSTL,10,0.7533333329999999,strong,novel,CONFIDENTIAL,753.3333332999997,5243.0,8383.4,9077.1,3332.184686206882
4,5905.839785214386,4944.675457011142\nHLA-A*24:02,QYNKPLCDLLI,11,0.7666666670000001,strong,novel,CONFIDENTIAL,766.6666667000003,1659.7,1162.3,2768.8,134.74745049246454,402.8452453716209,542.4886114927364\nHLA-A*24:02,KLPQLCTEL,9,0.83,strong,novel,CONFIDENTIAL,830.0,6113.0,6747.1,11100.88,1551.3652961517096,3712.5243927747533,5494.156914058253\nHLA-A*24:02,VYRDGNPYAV,10,1.06,strong,novel,CONFIDENTIAL,1060.0,1933.7,2111.0,3259.54,402.34460018719125,1015.4575794850017,1397.3121809349875\nHLA-A*24:02,YAVCDKCLKF,10,1.093333333,strong,novel,CONFIDENTIAL,1093.333333,7763.1,7692.7,12914.62,4306.466832601471,6667.05677309083,9985.12989112205\nHLA-A*24:02,DRAHYNIVTF,10,1.316666667,strong,novel,CONFIDENTIAL,1316.666667,11145.0,12097.2,6105.89,2428.8140888228404,4939.367664513643,7255.9444215451795\nHLA-A*24:02,LQTTIHEII,9,1.65,strong,novel,CONFIDENTIAL,1650.0,12345.4,11328.9,7819.96,5914.300147570152,7860.195300661325,2281.5229434808784\nHLA-A*24:02,EYRHYCYSLY,10,1.983333333,strong,novel,CONFIDENTIAL,1983.3333329999998,7319.8,7218.3,6231.43,703.2658386654582,1515.9408499157364,1477.5881767756796\nHLA-A*24:02,AHYNIVTF,8,2.2766666669999998,strong,novel,CONFIDENTIAL,2276.666667,7995.6,10388.4,12213.63,535.0480605196514,830.1737520796222,2491.9112158124303\nHLA-A*24:02,VYCKQQLL,8,2.4533333330000002,strong,novel,CONFIDENTIAL,2453.333333,2262.2,3318.8,1910.18,52.27313383518106,88.41831947379872,575.635438043838\nHLA-A*24:02,DFAFRDLCI,9,3.353333333,strong,novel,CONFIDENTIAL,3353.333333,6895.2,8358.8,5299.67,2321.9605565176307,4833.893616635816,2774.791392983021\nHLA-A*24:02,YSLYGTTL,8,3.356666667,strong,novel,CONFIDENTIAL,3356.666667,9147.3,10801.2,5320.23,7199.2723693321095,9213.709378071613,13122.858103833583\nHLA-A*24:02,FYSKISEY,8,3.36,strong,novel,CONFIDENTIAL,3360.0,12309.7,12705.1,17137.81,2205.404661591299,3581.325026470185,6557.405367260899\nHLA-A*24:02,VYDFAFRDLC,10,4.303333333,strong,novel,CONFIDENTIAL,4303.333333,6027.3,5872.8,7810.15,3261.5361858609203,4541.968966974872,5575.954542513338\nHLA-A*24:02,EVYDFAFRDL,10,4.7466666669999995,strong,novel,CONFIDENTIAL,4746.666667,7778.9,7300.7,10889.37,10584.928627802516,14998.341370715189,16265.726335821162\nHLA-A*24:02,FYSKISEYRH,10,4.76,strong,novel,CONFIDENTIAL,4760.0,17447.3,19539.6,4653.72,2557.610445090333,4367.497745288996,5044.566675718917\nHLA-A*24:02,HYNIVTFCC,9,6.06,intermediate,novel,CONFIDENTIAL,6060.0,5297.5,7451.3,3700.51,883.2590003262035,1556.9247929275973,1614.7965128506924\nHLA-A*24:02,CYSLYGTTLE,10,6.8666666670000005,intermediate,novel,CONFIDENTIAL,6866.666667,3660.3,5860.6,1124.93,1459.6902336809735,2496.7356419659945,3525.4328802793743\nHLA-A*24:02,LYGTTLEQQY,10,7.08,intermediate,novel,CONFIDENTIAL,7080.0,9536.2,11002.8,10662.6,1941.517006322411,3576.9833205995587,4657.422623464599\nHLA-A*24:02,KCLKFYSKI,9,7.503333333,intermediate,novel,CONFIDENTIAL,7503.333333,14133.4,17530.9,2341.86,699.1338794910405,1171.0981253622886,1770.7628128622314\nHLA-A*24:02,LECVYCKQQL,10,8.133333333,intermediate,novel,CONFIDENTIAL,8133.333333,38821.8,37065.8,37771.0,10864.99221187379,11788.509292507915,19923.786170055457\nHLA-A*24:02,EYRYYCYSV,9,8.64,intermediate,novel,CONFIDENTIAL,8640.0,972.8,1102.6,2521.31,443.0731091832768,775.765326629387,2495.118062975035\nHLA-A*24:02,FYSKISEYRHY,11,10.6,intermediate,novel,CONFIDENTIAL,10600.0,11705.4,8762.6,14075.75,2915.085842405545,5116.743170286183,10544.696205113243\nHLA-A*24:02,PYAVCDKCLK,10,10.92666667,intermediate,novel,CONFIDENTIAL,10926.66667,25168.5,27683.3,26543.21,6885.730236669599,9592.873322236668,1
3304.621298708753\nHLA-A*24:02,YDFAFRDLCI,10,11.01333333,intermediate,novel,CONFIDENTIAL,11013.33333,2877.7,4657.9,2956.78,12352.424311027677,16119.501861361616,17724.81775456142\nHLA-A*24:02,SEYRHYCYSL,10,11.97666667,intermediate,novel,CONFIDENTIAL,11976.66667,480.4,743.8,825.17,4777.401079101041,8473.810658312603,12884.606885147292\nHLA-A*24:02,HYNIVTFC,8,16.50666667,weak,novel,CONFIDENTIAL,16506.66667,22350.0,21160.9,23030.23,4943.438196616697,5891.456284818602,17692.391313008437\nHLA-A*24:02,VYCKQQLLRR,10,22.28,weak,novel,CONFIDENTIAL,22280.0,25203.4,28044.8,13917.2,3204.1311394463733,5560.819014680763,6042.764371359264\nHLA-A*24:02,REVYDFAF,8,23.27,weak,novel,CONFIDENTIAL,23270.0,18654.2,23029.5,23691.66,1850.0543105860065,2478.076299821525,6735.1965030939145\nHLA-A*24:02,KKQRFHNIR,9,28.63666667,weak,novel,CONFIDENTIAL,28636.66667,36419.0,38575.1,35997.06,18840.464726183844,21098.373761501232,19027.297846877213\nHLA-A*24:02,KFYSKISEYRH,11,31.99666667,weak,novel,CONFIDENTIAL,31996.66667,32363.3,34292.3,19160.27,8006.412012230704,11191.558204349676,13840.797318332548\nHLA-A*24:02,CYSVYGTTLE,10,32.71,weak,novel,CONFIDENTIAL,32710.0,4245.8,6458.2,1649.91,1569.781391307999,2487.4000289810565,4869.80660107878\nHLA-A*24:02,YCYSLYGTTLE,11,36.66333333,weak,novel,CONFIDENTIAL,36663.33333,7610.9,9765.9,2313.58,16139.756539090044,18650.695843017987,23113.19583519468\nHLA-A*24:02,YSVYGTTL,8,37.87,weak,novel,CONFIDENTIAL,37870.0,11241.5,11347.0,7091.69,8065.2962258007765,10251.465162052817,14753.287936076544\nHLA-A*24:02,VYCKQQLLR,9,46.39666667,weak,novel,CONFIDENTIAL,46396.66667,17707.0,20124.1,6744.89,1776.1100492444934,3229.353309718729,4718.3223038401975\nHLA-A*24:02,KKQRFHNI,8,46.83666667,weak,novel,CONFIDENTIAL,46836.66667,30641.7,29623.0,24111.62,3941.4027541538735,5280.337684102296,10622.243090510694\nHLA-A*24:02,DKKQRFHNI,8,55.94666667,weak,novel,CONFIDENTIAL,55946.66667000001,39079.2,38729.5,25336.02,8260.198769933226,10568.709366954625,11615.771585035689\nHLA-A*24:02,HLDKKQRFHNI,11,60.8,weak,novel,CONFIDENTIAL,60800.0,35515.4,30329.4,35114.98,10190.34608252548,12609.65475151052,22871.03164018196\nHLA-A*24:02,VQSTHVDI,8,84.61333333,weak,novel,CONFIDENTIAL,84613.33333,33737.3,33855.8,29711.28,7667.765794228896,8752.810114048165,21345.212270250264\nHLA-A*24:02,DLYCYEQF,8,29.23,weak,novel,CONFIDENTIAL,29230.0,23229.4,20680.1,24389.75,2811.256108620112,4375.938952621141,11821.029639805005\nHLA-B*07:02,LIRCINCQKPL,11,0.16,strong,novel,CONFIDENTIAL,160.0,3064.8,4560.3,10601.75,1945.3949554892865,1759.2480536978715,13235.818200827416\nHLA-B*07:02,RPRKLPQLC,9,0.29,strong,novel,CONFIDENTIAL,290.0,99.6,70.6,286.03,57.26882685127524,59.91249013361661,172.89450329842686\nHLA-B*07:02,RGRWTGRCM,9,0.33,strong,novel,CONFIDENTIAL,330.0,544.1,737.1,167.14,88.23814698866963,80.05266353876195,72.67379631603724\nHLA-B*07:02,RPRKLPQLCT,10,0.53,strong,novel,CONFIDENTIAL,530.0,496.2,414.4,170.01,62.07504609258122,67.46746511764775,178.43235602677032\nHLA-B*07:02,RPRKLPQLCTE,11,0.81,strong,novel,CONFIDENTIAL,810.0,5681.8,4260.3,3632.24,304.6591113264488,285.9482746084626,3531.0372933871067\nHLA-B*07:02,RTRRETQL,8,1.1,strong,novel,CONFIDENTIAL,1100.0,3895.8,2410.3,4306.76,302.2911569113051,202.7473844585019,718.8179070458214\nHLA-B*07:02,TPTLHEYM,8,1.35,strong,novel,CONFIDENTIAL,1350.0,6604.3,5514.7,6866.75,1465.150113174896,1121.1330423351164,2429.2378272886103\nHLA-B*07:02,TPTLHEYML,9,2.27,strong,novel,CONFIDENTIAL,2270.0,505.2,481.8,521.93,118.06538967914535,140.81757469280657,92.07467249450524\nHLA-B*07:02,RAHY
NIVTF,9,2.33,strong,novel,CONFIDENTIAL,2330.0,1801.2,1073.9,2595.16,1032.0194526410553,1071.074685897953,861.016487718305\nHLA-B*07:02,DPQERPRKL,9,2.93,strong,novel,CONFIDENTIAL,2930.0,6430.7,8586.4,6673.02,4447.920925284252,5032.4869670550315,12010.097114484184\nHLA-B*07:02,SSRTRRETQL,10,4.06,strong,novel,CONFIDENTIAL,4060.0,2089.8,2816.0,2016.89,2538.6524520076005,2004.4016130475518,4567.458624166775\nHLA-B*07:02,CPEEKQRHL,9,4.23,strong,novel,CONFIDENTIAL,4230.0,893.3,1528.5,2463.79,1394.2744805759644,2149.807140578608,872.0581690760785\nHLA-B*07:02,IVYRDGNPYAV,11,12.94,intermediate,novel,CONFIDENTIAL,12940.0,26980.1,26763.8,29570.16,7479.563593333048,6102.175369168072,9605.759184515084\nHLA-B*07:02,LEQQYNKPL,9,15.1,weak,novel,CONFIDENTIAL,15100.0,13788.5,14910.4,16353.78,6259.887601536141,8331.416242786941,15960.857492638128\nHLA-B*07:02,EPDRAHYNIV,10,19.27,weak,novel,CONFIDENTIAL,19270.0,2351.6,2782.2,1084.14,6278.3163427984155,7045.2443973048275,3790.5555744968437\nHLA-B*07:02,KPLCDLLIRCI,11,21.1,weak,novel,CONFIDENTIAL,21100.0,6869.7,7330.3,12509.87,801.2379327776382,643.0833456130414,2284.5703449437165\nHLA-B*07:02,KPLCDLLI,8,21.62,weak,novel,CONFIDENTIAL,21620.0,6518.4,5417.9,8852.66,1815.3360300680606,1719.2729086743375,8584.752287226422\nHLA-B*07:02,NIRGRWTGRCM,11,49.58,weak,novel,CONFIDENTIAL,49580.0,12004.9,10710.7,12157.85,1904.0345404286825,2252.3556481610667,6420.5002847491005\nHLA-B*07:02,LLRREVYDFAF,11,53.51,weak,novel,CONFIDENTIAL,53510.0,22487.3,25001.3,26384.57,2863.3737518256316,2094.5426998534267,6319.718643066938\nHLA-B*15:01,CLKFYSKISEY,11,0.29,strong,?,CONFIDENTIAL,290.0,978.1,1148.9,3333.73,100.12875865670289,89.79060745300224,482.6428077286937\nHLA-B*15:01,LKFYSKISEY,10,2.44,strong,?,CONFIDENTIAL,2440.0,328.6,360.4,670.13,464.4010900707389,365.56287514368285,226.36814205834855\nHLA-B*15:01,SEYRHYCYSLY,11,2.5,strong,?,CONFIDENTIAL,2500.0,1944.7,2054.2,11010.92,1401.9870967130635,2102.273337412692,3603.7637607179827\nHLA-B*15:01,AVCDKCLKFY,10,2.96,strong,?,CONFIDENTIAL,2960.0,3065.1,2645.1,1317.37,178.86701409141514,170.1696154317968,235.54338201813513\nHLA-B*15:01,RHYCYSLY,8,3.96,strong,?,CONFIDENTIAL,3960.0,6469.3,4021.1,9120.32,6001.048237865377,6277.851617263311,8776.537436401066\nHLA-B*15:01,GQAEPDRAHY,10,3.97,strong,?,CONFIDENTIAL,3970.0,38.7,26.9,18.62,17.40913832461758,22.93177948959837,18.25441985354535\nHLA-B*15:01,KISEYRHYCY,10,4.18,strong,?,CONFIDENTIAL,4180.0,722.1,673.9,532.56,188.0057025142708,285.77394447106343,265.58478819185194\nHLA-B*15:01,LQPETTDLY,9,4.87,strong,?,CONFIDENTIAL,4870.0,233.2,307.5,114.64,86.10233924690445,138.71362381007455,93.16515819544595\nHLA-B*15:01,AFRDLCIVY,9,5.23,intermediate,?,CONFIDENTIAL,5230.0,1030.7,1057.2,460.29,424.3561939924511,612.2269976892022,976.4649149914122\nHLA-B*15:01,FAFRDLCIVY,10,5.27,intermediate,?,CONFIDENTIAL,5270.0,30.0,54.0,72.32,85.35493740907567,113.36915764673607,144.0842521338272\nHLA-B*15:01,YRHYCYSLY,9,5.62,intermediate,?,CONFIDENTIAL,5620.0,2226.8,2396.0,4253.87,2232.1043002386045,4070.7985953310827,4481.40899937409\nHLA-B*15:01,LQPETTDLYCY,11,6.08,intermediate,?,CONFIDENTIAL,6080.0,2534.8,2382.0,1904.75,106.96510326211235,116.31407824150664,57.36903618129154\nHLA-B*15:01,KQQLLRREVY,10,6.23,intermediate,?,CONFIDENTIAL,6230.0,178.4,74.7,76.01,30.491063196285573,44.73951549010499,68.18582442576489\nHLA-B*15:01,CIVYRDGNPY,10,6.58,intermediate,?,CONFIDENTIAL,6580.0,125.9,114.3,110.45,40.53014006414743,45.807707973742865,31.46490517047732\nHLA-B*15:01,IVYRDGNPY,9,6.63,intermediate,?,CONFIDENTIAL,663
0.0,46.1,31.9,61.09,52.87880709132674,84.07449075465877,47.86039440508893\nHLA-B*15:01,LLMGTLGIV,9,7.1,intermediate,?,CONFIDENTIAL,7100.0,860.8,937.1,2322.78,534.5426071153162,849.7323988294139,462.22498009594113\nHLA-B*15:01,SLYGTTLEQQY,11,7.19,intermediate,?,CONFIDENTIAL,7190.0,338.0,230.4,1740.59,117.23636408487276,234.18830949346,105.37219050298744\nHLA-B*15:01,QQYNKPLCDL,10,8.52,intermediate,?,CONFIDENTIAL,8520.0,1829.3,1621.3,1345.65,600.4679447031629,899.1971301644063,850.2194364474872\nHLA-B*15:01,KLPQLCTEL,9,8.8,intermediate,?,CONFIDENTIAL,8800.0,3978.6,4085.5,5949.82,3835.943090749321,7120.605089018278,7847.671348501214\nHLA-B*15:01,QQLLRREVY,9,8.9,intermediate,?,CONFIDENTIAL,8900.0,300.0,207.4,193.81,230.91940954822832,219.41800195333204,282.60995304812405\nHLA-B*15:01,LCIVYRDGNPY,11,10.49,intermediate,?,CONFIDENTIAL,10490.0,494.5,301.2,982.82,339.8685867172298,227.20500067595367,682.6748668050218\nHLA-B*15:01,LLRREVYDFAF,11,11.34,intermediate,?,CONFIDENTIAL,11340.0,381.6,399.3,1704.66,67.71174660960358,212.11992230183037,839.4445432934434\nHLA-B*15:01,LLRREVYDF,9,13.73,intermediate,?,CONFIDENTIAL,13730.0,87.9,94.0,281.84,73.4518778217546,114.8259665577371,62.3035983246114\nHLA-B*15:01,VYRDGNPY,8,14.08,intermediate,?,CONFIDENTIAL,14080.0,3951.2,3244.9,2753.92,813.0948639229259,1064.1938077744298,1394.6899762223227\nHLA-B*15:01,RAHYNIVTF,9,15.85,weak,?,CONFIDENTIAL,15850.0,64.7,62.2,82.18,59.67455706810693,86.34963096148407,35.39766823681614\nHLA-B*15:01,GQAEPDRAH,9,17.31,weak,?,CONFIDENTIAL,17310.0,1672.1,886.6,1526.88,627.7935489433132,1156.7461202151535,940.5329764058071\nHLA-B*15:01,TLHEYMLDL,9,19.1,weak,?,CONFIDENTIAL,19100.0,3961.2,4781.2,5412.44,3188.32234786814,4370.989395819677,6157.718865619735\nHLA-B*15:01,YAVCDKCLKFY,11,19.77,weak,?,CONFIDENTIAL,19770.0,2994.2,3383.8,11119.27,1313.5652926667344,1123.1035569308099,2332.3519035580634\nHLA-B*15:01,LQTTIHDII,9,20.8,weak,?,CONFIDENTIAL,20800.0,3201.7,3183.3,6475.15,3508.220617041432,5252.916723398049,3558.7993737839442\nHLA-B*15:01,QQLLRREVYDF,11,20.84,weak,?,CONFIDENTIAL,20840.0,4777.8,3005.2,6524.1,1395.5709140240094,1647.8974388280567,3234.8884455717102\nHLA-B*15:01,KQRFHNIRGRW,11,21.33,weak,?,CONFIDENTIAL,21330.0,10844.6,6519.9,12284.4,3209.069828170334,5037.071796544306,7191.905607823522\nHLA-B*15:01,AGQAEPDRAHY,11,21.56,weak,?,CONFIDENTIAL,21560.0,1404.6,1362.3,1407.49,523.3780922619607,1174.1391466687742,2108.223246766194\nHLA-B*15:01,YSKISEYRHY,10,21.64,weak,?,CONFIDENTIAL,21640.0,174.5,235.9,48.14,49.734400543630066,38.94847000263059,41.31760975565961\nHLA-B*15:01,QLLRREVY,8,28.48,weak,?,CONFIDENTIAL,28480.0,7374.1,4959.3,4022.01,334.1041331076923,497.43111535595193,996.4526394200656\nHLA-B*15:01,KQRHLDKKQRF,11,29.77,weak,?,CONFIDENTIAL,29770.0,4349.9,1481.7,3954.91,264.1992655778023,799.4991513146069,3186.967792936229\nHLA-B*15:01,YAVCDKCLKF,10,29.92,weak,?,CONFIDENTIAL,29920.0,341.8,721.3,1324.03,589.5003656676157,322.8184699303833,170.59555698221587\nHLA-B*15:01,HDIILECVY,9,32.57,weak,?,CONFIDENTIAL,32570.0,2732.9,4656.1,1976.11,1364.6114315053885,1536.2820888670612,1348.9721708118166\nHLA-B*15:01,TIHDIILECVY,11,37.79,weak,?,CONFIDENTIAL,37790.0,1072.7,1127.3,2106.16,595.6118816499967,979.8290459326907,3282.111249651654\nHLA-B*15:01,IVYRDGNPYA,10,40.0,weak,?,CONFIDENTIAL,40000.0,871.8,544.8,935.11,5431.139564386863,5685.053856764514,5664.246317872208\nHLA-B*15:01,SEYRHYCY,8,41.84,weak,?,CONFIDENTIAL,41840.0,6850.1,7695.0,8495.33,1628.083811935315,1895.3144773195527,2815.4525280584785\nHLA-B*15:01,LLMGTLGI,8,43.7
5,weak,?,CONFIDENTIAL,43750.0,6292.9,6789.1,14125.18,465.4261794512378,605.1702826248901,1208.890677643299\nHLA-B*15:01,RGRWTGRCM,9,45.1,weak,?,CONFIDENTIAL,45100.0,4008.0,3340.2,2544.8,1820.363713752112,3486.2518669557435,4006.836697906264\nHLA-B*15:01,AVCDKCLKF,9,51.89,weak,?,CONFIDENTIAL,51890.0,1434.1,1425.9,2322.91,762.744945580612,819.5206751963057,640.1132456383094\nHLA-B*15:01,SKISEYRHY,9,56.55,weak,?,CONFIDENTIAL,56550.0,1435.6,1678.9,2651.99,3236.554371663517,4229.655776556141,2363.137677540985\nHLA-B*15:01,KISEYRHY,8,57.15,weak,?,CONFIDENTIAL,57150.0,4275.6,3402.6,3430.92,212.9623859156265,389.45988157147883,426.76252622744016\nHLA-B*15:01,CQKPLCPEE,9,67.81,weak,?,CONFIDENTIAL,67810.0,16285.1,13486.3,11536.18,7714.102386831012,11886.001598087749,7346.684977628699\nHLA-B*15:01,LRREVYDFAF,10,69.3,weak,?,CONFIDENTIAL,69300.0,5789.7,4932.3,6139.74,1691.4770197820444,3493.9972978175706,3106.905203122733\nHLA-B*15:01,QLLRREVYDF,10,77.7,weak,?,CONFIDENTIAL,77700.0,1215.3,1582.7,2564.26,330.5604649673025,416.60424757969736,437.37861422193896\nHLA-B*15:01,FYSKISEY,8,94.5,weak,?,CONFIDENTIAL,94500.0,2369.7,2379.5,7637.92,2414.7564272356617,3321.432577617953,4414.84818994649\nHLA-A*02:01,LLMGTLGI,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,184.0,328.6,212.27,40.10507816195131,36.68413487712265,150.4233501819733\nHLA-A*02:01,LMGTLGIV,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,1875.4,3868.3,1105.59,1731.2786926325007,1034.1209486251953,3385.6201356725123\nHLA-A*02:01,EYMLDLQPETT,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,6152.0,5367.0,1548.09,19607.41628195521,22932.299772195034,21660.167247021807\nHLA-A*02:01,LRLCVQSTHV,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1971.4,2131.1,1815.24,6258.1693122330225,8850.5216424582,11139.886334756673\nHLA-A*02:01,KISEYRYYC,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,675.0,1585.7,2349.63,1616.7235765366324,1361.5868928858724,1663.4471790977288\nHLA-A*02:01,YSLYGTTLEQ,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4849.4,6125.2,2507.49,4946.262186569977,7276.920108129971,5348.447744367745\nHLA-A*02:01,DLLMGTLGI,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,1327.9,1727.9,2529.15,1247.362388651159,736.5847222237112,721.9583442636213\nHLA-A*02:01,PLCDLLIRCI,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4404.1,6552.8,2553.9,6227.56022017545,5538.638762408619,9368.909416015516\nHLA-A*02:01,FAFRDLCIVY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,6485.2,8256.3,3191.53,6896.565730348924,8109.559671334478,7189.8532424441355\nHLA-A*02:01,IVYRDGNPYAV,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,2531.3,518.2,3287.59,291.0171642495432,384.71909049438193,563.1260951324526\nHLA-A*02:01,YCYSVYGTTL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,9502.8,8457.4,3314.52,4414.623268810618,4277.971516972154,4779.208919603183\nHLA-A*02:01,QLLRREVYDFA,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,5854.7,2327.3,3518.78,799.2711256419948,503.36698776390966,765.5631997379436\nHLA-A*02:01,MLDLQPETTDL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,4519.0,2556.8,3950.38,2691.260928057141,2899.896483807252,3845.047174100261\nHLA-A*02:01,IILECVYCK,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,17699.0,14731.4,4245.82,3546.5930165011573,4096.682345418097,5340.488501547996\nHLA-A*02:01,YRDGNPYAV,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,7286.9,4990.8,4312.26,697.3534782821471,793.9928564089412,702.6178592295229\nHLA-A*02:01,IILECVYC,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,13575.9,16281.2,5793.17,2605.483298879635,3189.755504599269,4549.1810265154745\nHLA-A*02:01,TLRLCVQSTHV,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,9657.4,5423.7,6275.13,4642.816820403671,6367.344637406844,10467.335317603565
\nHLA-A*02:01,YDFAFRDLCIV,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,9827.2,5663.8,6605.64,8537.612365776004,9076.15995019165,8472.794847172467\nHLA-A*02:01,YDFAFRDLCI,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,9356.6,8596.9,7051.13,5783.470457750719,8886.639869166393,6900.1844605167835\nHLA-A*02:01,LLIRCINCQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,17771.5,18562.4,7167.67,6194.734449463747,6730.359385664998,9156.309395339307\nHLA-A*02:01,TLEQQYNKPL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,8846.4,9944.9,7526.68,8247.187903778167,7724.006369247068,7934.689593833418\nHLA-A*02:01,LMGTLGIVC,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,6752.4,7936.9,7548.2,5956.836085965389,7680.189836417857,6188.5104047798995\nHLA-A*02:01,SEYRHYCYSV,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,7213.2,7818.5,7961.17,2208.7935464187963,2195.755402862796,3869.8187165363624\nHLA-A*02:01,SLYGTTLEQQ,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,13853.8,14694.2,8398.45,4484.348332784442,5169.1361410017635,6238.559002635688\nHLA-A*02:01,TIHDIILECVY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,16111.2,13565.7,8436.97,18480.247525559542,17254.18407367307,16052.805885011456\nHLA-A*02:01,DLCTELQTTI,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,13562.4,11451.5,8439.62,10315.064570869743,7829.124137416472,8372.603499441011\nHLA-A*02:01,LLIRCINC,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,14592.1,18081.7,9263.13,5840.7933342989,7219.689803403051,8153.439741505592\nHLA-A*02:01,KPLCDLLIRCI,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,21675.5,24361.3,9568.74,11669.912353975344,12963.024986499122,6933.274830652026\nHLA-A*02:01,KQQLLRREV,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,12405.4,12030.3,10473.71,11151.684959687818,13378.011377737554,9381.644580361364\nHLA-A*02:01,QLLRREVYDF,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,13898.1,12025.3,11086.96,11557.492136930747,8072.482127910267,9421.217765296751\nHLA-A*02:01,RLECVYCKQQL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,7444.6,6179.7,11254.96,6057.102203953418,7857.069310594371,6712.316098963371\nHLA-A*02:01,LLIRCINCQK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,16704.8,17019.5,11948.67,8211.356112912066,8409.215012137936,7444.559187973554\nHLA-A*02:01,KCLKFYSKI,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,17601.4,16904.7,12145.36,5452.758713298393,5650.7759692530835,5573.756294555697\nHLA-A*02:01,ILECVYCK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,28087.3,23841.6,13210.86,12045.85456890354,13370.493728100842,21677.317790126628\nHLA-A*02:01,LLRREVYDFAF,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,15235.6,13611.5,13943.72,10421.877091861914,9958.824701602422,8713.581563985184\nHLA-A*02:01,HLDKKQRFHNI,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,17238.0,13408.9,13960.33,2299.22418049324,1599.8286874944306,3929.7162390308467\nHLA-A*02:01,ELQTTIHEIIL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,17333.4,13077.5,14052.78,10684.042104244922,9762.87125737261,12016.088103237638\nHLA-A*02:01,YNKPLCDLLI,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,22341.3,20470.9,14169.12,10628.921221182429,11308.44710150331,6290.237739071789\nHLA-A*02:01,SEYRHYCYSL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,13026.1,15061.2,14259.85,5811.169006151247,5824.826979669005,10007.41998819825\nHLA-A*02:01,SLYGTTLEQQY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,18915.2,13566.9,14538.09,9040.57917762621,9154.051955183671,7841.014786675651\nHLA-A*02:01,QQYNKPLCDLL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,13660.3,7395.5,14733.66,7881.58844606867,10235.307999333174,9186.411917724126\nHLA-A*02:01,YAVCDKCLKF,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,18867.1,18812.7,15271.88,14274.15507299491,16838.86644164798,14951.32987603311\nHLA-A*02:01,VYRDG
NPYAV,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,18526.3,19492.7,15454.89,4585.286102298362,7438.690507727205,5895.185063371625\nHLA-A*02:01,RTLEDLLM,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,19711.8,18336.2,15875.55,2484.2089733460502,2163.61486135186,6388.379531010197\nHLA-A*02:01,ISEYRHYCYSV,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,22763.2,17417.3,16198.28,2894.512011167368,2426.831196366345,5418.379048304882\nHLA-A*02:01,CMSCCRSSRT,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,12198.2,13951.6,16991.94,10535.60112853321,10118.68679835581,14725.56271574406\nHLA-A*02:01,DLYCYEQL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,17409.5,16955.4,17632.1,8137.1463906061745,7442.693714332056,18847.28762692309\nHLA-A*02:01,TTLEQQYNKPL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,16075.4,9390.2,17677.19,10436.028647722613,7351.251124982511,9772.915561571059\nHLA-A*02:01,YSLYGTTL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,20908.9,19223.6,17993.27,2281.09855045255,1766.0101648825876,6784.524748974714\nHLA-A*02:01,LECVYCKQQLL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,18246.4,11673.4,19398.9,15988.749501063316,17994.289200286003,22286.800243045924\nHLA-A*02:01,FQDPQERPRKL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,23015.3,17750.8,20790.04,6434.987943781,7658.949352552701,7701.4660952873155\nHLA-A*02:01,DLLIRCINCQK,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,33080.9,32321.3,20836.43,15381.046882179076,16312.850147876072,12752.31784017157\nHLA-A*02:01,YCYSVYGT,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,30982.1,30513.7,20902.36,6802.646468665559,5963.458511143889,11849.150764435824\nHLA-A*02:01,HVDIRTLEDLL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,26313.3,20053.4,21135.7,11756.82545898115,9757.506531027086,8429.851731645947\nHLA-A*02:01,QQLLRREVYDF,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,30678.9,27432.3,22055.0,19048.964668808807,18004.958876280023,20164.68452413133\nHLA-A*02:01,QYNKPLCDLLI,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,35465.9,31893.7,23709.1,12473.319358348526,16566.3994972223,15278.271757940349\nHLA-A*02:01,GIVCPICSQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,24846.0,29028.1,24415.38,16340.00360103559,17338.576062648906,18546.03609115097\nHLA-A*02:01,YSKISEYRHYC,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,19293.2,16011.7,24619.36,20768.38521022667,21379.50661678288,19188.252615464804\nHLA-A*02:01,LQPETTDLY,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,24655.3,24832.0,25371.14,15306.476435863176,19334.568885193483,15652.590707128582\nHLA-A*02:01,KISEYRHYCYS,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,22367.7,20249.9,25555.72,9021.712561571116,10503.08016267608,14106.538410439864\nHLA-A*02:01,QLNDSSEEE,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,27846.4,29819.8,28772.62,14869.244790561152,16809.168241559193,12204.426798386134\nHLA-A*02:01,CVYCKQQL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,22673.2,21594.1,28998.27,13189.078881145795,14659.685439640254,21627.67462792852\nHLA-A*02:01,QQLLRREVYD,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,38188.5,37815.6,30968.04,22971.137238845695,25162.71603898916,20481.405031208604\nHLA-A*02:01,VQSTHVDI,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,34280.8,31479.2,31123.22,14870.238291291284,14288.133752706222,23077.704869215453\nHLA-A*02:01,LCVQSTHVDIR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,37863.9,36133.6,34584.06,21318.04847828535,23000.069879246737,25881.846850535672\nHLA-A*02:01,PQLCTELQTTI,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,14066.9,13293.0,8717.3,13500.485965757342,15988.82079411965,13075.008237186474\nHLA-A*02:01,IILECVYCKQQ,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,33495.1,30770.0,18479.01,12839.402238388291,16496.963366211512,13032.423575407904\nHLA-A*02:
01,STHVDIRTL,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,21370.0,21277.7,13064.73,13081.25664250346,11367.70054316108,14121.00018631782\nHLA-A*02:01,GIVCPICS,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,34852.3,34494.7,31844.36,16979.390982186476,17001.090435871018,22019.158545689672\nHLA-A*03:01,RCMSCCRSSR,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,338.5,229.2,274.07,2146.151895876547,2678.6392705485628,1852.490604772033\nHLA-A*03:01,KISEYRHYCY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,421.4,625.5,572.58,508.5271761850045,198.21151382809373,295.13399784149385\nHLA-A*03:01,CMSCCRSSR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,890.9,665.7,711.79,698.3657242942412,2116.4394014354266,1511.9583619970472\nHLA-A*03:01,CLKFYSKISEY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,894.8,1363.0,1540.97,2095.081375326685,5205.889344177251,5802.133969881049\nHLA-A*03:01,DIILECVYCK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1882.1,2341.3,1829.27,6024.303744160254,4179.40804463089,5637.158934176868\nHLA-A*03:01,DLLIRCINCQK,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,1145.9,2405.2,2443.06,1671.6915196553628,1513.4730710112726,1936.349688276893\nHLA-A*03:01,CVYCKQQLLRR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,2237.6,2458.8,2848.84,95.13939639605283,126.24882041983204,176.5199522745506\nHLA-A*03:01,AHYNIVTFCCK,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,10306.1,7881.6,2918.19,942.6111512922834,2353.3178810853587,2133.4108950052528\nHLA-A*03:01,CIVYRDGNPY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1467.2,2175.4,3007.11,4834.8995768888535,4183.345336629446,3218.6861814278805\nHLA-A*03:01,NIRGRWTGR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,8301.7,8766.1,3240.31,2543.19939076268,4158.268367969763,2273.462019112702\nHLA-A*03:01,HYNIVTFCCK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4049.9,4137.7,3307.86,2198.6174472625776,584.9455802587668,1084.223382567712\nHLA-A*03:01,LKFYSKISEY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,3145.9,4157.9,3753.5,9631.77876679148,6822.620760297989,6779.638719408612\nHLA-A*03:01,LGIVCPICSQK,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,1894.1,3028.7,4665.87,4029.238428165269,8264.699832136585,7763.458517971719\nHLA-A*03:01,MSCCRSSRTR,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4520.9,5366.9,4817.98,1521.4009021341074,947.6102090610506,861.4797598619441\nHLA-A*03:01,AFRDLCIVYR,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4587.1,3393.3,5085.86,4400.140655805826,6190.8345548087855,5804.855243630565\nHLA-A*03:01,SLYGTTLEQQ,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1800.8,2198.7,5755.56,2883.4793280106846,1957.5732183517416,3642.2699172915004\nHLA-A*03:01,FAFRDLCIVY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,6733.5,7682.8,6638.96,5603.9553562641795,4502.49950017559,4077.4811835858773\nHLA-A*03:01,ILECVYCK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,3159.6,4307.9,6671.15,890.273813982895,2692.586448413381,2374.3744309074427\nHLA-A*03:01,IVYRDGNPYA,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,7084.6,8807.6,7479.27,5575.002201934725,7304.863413406947,7168.783410726695\nHLA-A*03:01,YNIVTFCCK,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,7521.7,6165.1,7847.85,3252.6544931259614,1437.3016960840844,2674.2028326013715\nHLA-A*03:01,AFRDLCIVY,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,8917.6,9238.2,7946.88,9427.562999067719,10594.479488804085,10957.51729834966\nHLA-A*03:01,TIHDIILECVY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,7794.2,7411.9,8104.13,10238.983678331142,10535.356809094204,6687.412723396845\nHLA-A*03:01,YAVCDKCLK,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,7155.4,6883.9,8224.8,3821.649764439815,2424.348143472158,4209.584907778554\nHLA-A*03:01,KFYSKISEYRH,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,12156.0,13864.3,85
73.91,8079.243506444573,13968.451334406172,17485.818462512172\nHLA-A*03:01,SEYRHYCYSLY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,16141.2,13640.5,8860.23,3824.429784223868,5840.156611897132,8804.176450141538\nHLA-A*03:01,CQKPLCPEEK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,6276.4,6630.4,9041.61,7555.636156280687,9600.987179295393,11138.161583549549\nHLA-A*03:01,RLCVQSTHV,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,10478.1,15291.1,9250.01,9181.889033252728,12869.854545414255,13561.35189596395\nHLA-A*03:01,CMSCCRSSRTR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,4846.3,5407.0,10382.31,1255.8368120386338,5820.3946396026795,10020.255721723788\nHLA-A*03:01,FAFRDLCIVYR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,5944.0,6144.5,10464.76,2491.416241486951,3000.0494697373656,4183.038873089809\nHLA-A*03:01,CDKCLKFYSK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,15320.0,14495.2,10654.87,4404.859869695959,2186.190658739226,2983.1005232107013\nHLA-A*03:01,TLEQQYNK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,8603.6,10288.9,10744.48,2689.6793224747407,5570.180711962954,10546.89805472796\nHLA-A*03:01,VYCKQQLLR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,16174.8,15505.3,11111.21,5567.253279410506,6462.855881066801,12767.768012515184\nHLA-A*03:01,YGTTLEQQYNK,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,8477.2,6768.1,11681.38,9249.895196629821,13146.110479915269,14859.272306690418\nHLA-A*03:01,WTGRCMSCCR,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,8888.0,10630.0,12285.99,8769.71255533588,8526.06828795749,9365.77933071704\nHLA-A*03:01,LLMGTLGIVC,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,25372.2,25636.3,12569.3,6640.963839857213,5425.959510050151,6041.896311538562\nHLA-A*03:01,KISEYRHY,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,12211.8,13743.6,12912.53,1745.7820230340333,5329.022384307455,5567.132124374539\nHLA-A*03:01,NIVTFCCK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,10964.7,13002.8,12935.6,1844.1544916120545,3736.831761832908,4925.956839391305\nHLA-A*03:01,KQRHLDKK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,14324.0,13835.5,12994.1,978.3974358896014,1889.583078871707,4983.341587821577\nHLA-A*03:01,RAHYNIVTF,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,11466.9,12923.0,13236.75,9593.774954796472,8075.12897822427,12654.867729913249\nHLA-A*03:01,AVCDKCLKF,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,18440.7,16946.0,13831.93,7889.603419329891,8646.709244506084,10614.6251828039\nHLA-A*03:01,ISEYRHYCY,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,15430.7,14319.7,14211.49,12713.65719886557,8762.981746819096,8380.195323653576\nHLA-A*03:01,KQRFHNIR,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,14637.8,13352.3,14664.95,7402.795328586102,17833.537842691203,16238.782362526514\nHLA-A*03:01,MSCCRSSRTRR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,11276.2,11875.2,15034.96,2449.284978813199,6777.043043756836,5270.466920037933\nHLA-A*03:01,TLHEYMLDL,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,19457.6,18168.3,15136.0,13478.837615421822,15576.67774404172,16919.82201004596\nHLA-A*03:01,RLCVQSTH,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,9516.8,10452.0,15734.3,5585.55143358981,11921.371380186743,15486.510604749796\nHLA-A*03:01,KQRFHNIRGRW,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,16747.9,20875.5,15845.69,17769.847494111054,19599.174139326293,20086.92059187255\nHLA-A*03:01,GTLGIVCPI,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,16237.1,14216.1,16709.72,12524.864051355848,12949.151893041524,13325.577079707613\nHLA-A*03:01,QLLRREVYDF,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,19300.3,20194.8,17676.8,14022.340680721682,20082.35074421348,18551.78613098005\nHLA-A*03:01,LLIRCINCQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,8157.2,11563.9,18800.89,12143.728454198936,9472.681
06483512,9922.57257387812\nHLA-A*03:01,DLLMGTLGIV,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,23381.5,25829.0,19532.01,19685.621315083717,16714.310190655182,13696.421961249798\nHLA-A*03:01,EVYDFAFR,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,13702.2,15071.1,19611.21,1767.8732388699136,3858.152467319781,7339.085909331587\nHLA-A*03:01,MSCCRSSR,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,13412.8,17131.7,19626.06,4265.170604589931,9705.2678010256,11207.538346703797\nHLA-A*03:01,LRLCVQSTHV,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,21693.8,25703.0,19670.29,23725.232869921416,21818.993890530248,22001.21513133623\nHLA-A*03:01,STLRLCVQSTH,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,18133.0,16029.2,20787.57,6090.8665088837915,7790.524470676007,10694.070090191282\nHLA-A*03:01,QLLRREVY,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,20930.4,23909.5,21021.66,7983.111817600068,14737.649498897768,17877.926347847344\nHLA-A*03:01,LLRREVYDF,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,21305.1,22277.1,24721.05,15832.304015760437,19283.206536874794,20248.882622842077\nHLA-A*03:01,KCLKFYSKI,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,26463.2,30831.3,28007.8,21373.22114892818,26154.45167537219,26504.28315878981\nHLA-A*03:01,MSCCRSSRT,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,24516.5,27338.6,28748.97,15119.46430226626,19740.653965912104,24027.66602289625\nHLA-A*03:01,PLCPEEKQR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,36295.1,36868.6,34912.68,15986.350453537061,23361.048188657267,25231.38625942724\nHLA-A*11:01,AGQAEPDRA,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,41723.4,40700.5,32897.81,19487.655921806832,30992.293143074872,28645.275312916296\nHLA-A*11:01,CDKCLKFYSK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,8273.7,6893.5,5287.81,1057.9586125305268,1930.9392149616751,1160.6200182226905\nHLA-A*11:01,CIVYRDGNPY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1549.9,1954.0,4175.53,1201.5119232425986,1523.6402337495904,1299.5845012037664\nHLA-A*11:01,CLKFYSKISEY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,7376.9,8920.5,19570.09,7637.329624943686,8607.709095106029,8762.37463560671\nHLA-A*11:01,CMSCCRSSR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,1592.3,1773.2,1842.96,488.0842002240792,1874.2298925866912,1782.2213109483835\nHLA-A*11:01,CMSCCRSSRTR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,8619.7,9396.2,12933.23,2050.42703281226,5002.305870894132,4060.2372663091705\nHLA-A*11:01,CQKPLCPEEK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,5449.0,4837.6,4576.07,3343.3340164938622,4865.293587730572,2478.4329499518435\nHLA-A*11:01,CTELQTTIHDI,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,35925.5,35188.4,35050.81,22798.707627700576,28660.626685495805,29746.469768459243\nHLA-A*11:01,CVQSTHVDIR,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,2449.3,2115.8,3271.2,1291.8619090634695,4487.297919064697,3476.411583807726\nHLA-A*11:01,CVYCKQQL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,31728.1,31864.3,39604.52,14035.357103872793,20154.58255974632,21930.36670288617\nHLA-A*11:01,CVYCKQQLLRR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,2403.7,2456.7,4831.97,93.83898846301727,390.7621648395705,423.73524575262087\nHLA-A*11:01,DEIDGPAG,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,48003.1,47382.8,46217.22,27792.657178297242,38917.54642211169,38973.699846921714\nHLA-A*11:01,DGNPYAVCDKC,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,40538.8,40097.4,36277.0,22644.889294346005,32958.07360819781,31155.150716482636\nHLA-A*11:01,DIILECVYCKQ,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,17383.4,25455.0,12219.31,15002.464793098225,21154.070358240133,25959.531548526524\nHLA-A*11:01,DKCLKFYSK,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,25328.9,27730.4,18143.61,5625.1027409749695,7487.455608354694,515
9.269579738569\nHLA-A*11:01,ETTDLYCY,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,28056.6,27712.1,27009.93,3114.3400173612004,8754.660161221043,11449.502370931597\nHLA-A*11:01,ETTDLYCYE,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,16807.5,16298.1,23906.67,7638.863420046611,9078.933396460823,8752.514875295234\nHLA-A*11:01,EVYDFAFRD,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,33871.9,29166.3,22421.45,10000.986830999123,14992.82471561801,10471.836474467988\nHLA-A*11:01,FAFRDLCIVY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4890.6,5787.0,6695.51,2111.8075182587977,3764.62808001404,3468.643622885065\nHLA-A*11:01,FYSKISEYR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,7460.3,7323.0,11508.38,7598.6440074526,9063.00748065181,7878.682800473727\nHLA-A*11:01,GIVCPICS,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,36529.9,36951.7,37710.98,12178.449899354055,22401.8645937304,21862.68336374641\nHLA-A*11:01,GIVCPICSQKP,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,5977.6,9779.0,7427.01,19797.42036602359,18897.169929317624,20931.652420488208\nHLA-A*11:01,GNPYAVCDK,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,5427.2,9063.5,12577.73,3810.7391846558003,6521.732590493438,5437.6556973628\nHLA-A*11:01,GTLGIVCPI,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,5656.2,4368.8,11138.54,6941.08461334701,9651.926068364071,7345.191953283194\nHLA-A*11:01,GTTLEQQY,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,28459.9,28050.6,29129.4,4644.142127747263,11274.911093989678,12875.288108669034\nHLA-A*11:01,GTTLEQQYNKP,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,6917.7,9681.4,4733.19,14846.621207594035,14920.752522801706,15942.816579493032\nHLA-A*11:01,HYNIVTFCCK,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1212.9,1213.5,1042.44,986.2134889530138,1106.5693237358985,862.0328849786329\nHLA-A*11:01,IILECVYCKQ,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1574.5,3232.2,2186.62,3679.051824763417,5954.129636216291,4311.148633844881\nHLA-A*11:01,IRCINCQK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,16016.2,21088.2,18778.93,8480.580280892878,17388.291093062242,15253.674597972657\nHLA-A*11:01,IVCPICSQKP,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,2826.4,4960.7,2205.49,16508.530727223904,16974.72338982387,15775.722075404898\nHLA-A*11:01,KCLKFYSK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,15541.6,15933.7,6483.71,1688.5798741481885,5655.4332857194995,7313.001651474084\nHLA-A*11:01,KFYSKISEY,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,4277.8,4132.4,4210.14,4972.939038785257,9118.133117261548,8700.969290004108\nHLA-A*11:01,KFYSKISEYR,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1605.3,1633.4,2366.54,1137.0639999806385,2272.9980293324465,1729.8826861340624\nHLA-A*11:01,KFYSKISEYRH,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,18415.7,19655.2,25751.96,14699.295372019302,23992.417951230054,25590.848944466867\nHLA-A*11:01,KISEYRHY,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,17931.7,19198.5,15027.49,1522.6065110482944,4477.380176476408,5070.170946378111\nHLA-A*11:01,KISEYRHYC,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,20196.7,19172.7,11647.81,10281.016527436794,4944.240035832017,5751.106983853338\nHLA-A*11:01,KISEYRHYCYS,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,13337.5,14596.4,11597.26,4377.158306251542,7540.125987623304,7993.637624355907\nHLA-A*11:01,KLPQLCTE,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,38181.5,38757.1,41573.79,16633.422440105976,27104.274879078985,29385.089805161864\nHLA-A*11:01,KPLCDLLIR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,15434.3,15102.8,16968.61,8793.072708583251,11116.526219165933,11621.30038642246\nHLA-A*11:01,KQRFHNIR,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,21443.4,19892.5,25255.0,5787.74716083292,13307.040556423251,13540.427098056638\nHLA-A*11:01,KQRFHNIRGR,10,100.0,
nb,,NON-CONFIDENTIAL,100000.0,4249.5,4445.0,12018.69,5409.382204732741,9711.896646958934,9812.403449787995\nHLA-A*11:01,KQRHLDKK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,23657.8,18900.6,11497.93,2586.227786574936,6783.142480139129,7998.383868129876\nHLA-A*11:01,LKFYSKISEYR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,11847.9,12677.9,15475.8,13037.329081950098,15505.198031189448,15286.376816878057\nHLA-A*11:01,MFQDPQERPRK,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,10202.0,9296.4,11308.67,1280.2721339000564,1711.5512873649725,919.0610016561909\nHLA-A*11:01,MSCCRSSR,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,8484.7,11909.6,13500.58,745.2206048255368,5216.293578749275,6881.621862690828\nHLA-A*11:01,MSCCRSSRT,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,22981.7,27596.0,23953.79,11060.567746376115,14862.260431497814,13589.971511163352\nHLA-A*11:01,MSCCRSSRTR,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1533.6,2308.9,1643.51,903.9767868061217,2074.1432121998073,1958.2968832103518\nHLA-A*11:01,NIRGRWTGR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,14493.5,17810.6,20180.34,5890.516113854318,10879.94574628364,12143.640015284895\nHLA-A*11:01,PAGQAEPDRA,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,46355.4,44968.7,41496.95,23063.434845277046,34165.494001505336,30850.645412976253\nHLA-A*11:01,PLCPEEKQR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,36829.9,37176.6,32709.34,16598.379289006047,25603.592946354336,24689.211771860093\nHLA-A*11:01,QTTIHDIILE,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,15820.5,17922.7,25662.12,5541.484079375929,11350.789872319578,9146.047895482036\nHLA-A*11:01,RDGNPYAVCDK,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,23425.8,22986.2,21483.84,2351.0427263159063,5694.4144232026865,5896.370227517439\nHLA-A*11:01,REVYDFAFR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,12364.8,10984.1,13509.78,2239.19763824527,5266.938322697817,4025.0188421705866\nHLA-A*11:01,RHYCYSLY,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,20679.4,18175.4,26264.11,11030.244842726494,20766.472612386984,22126.468640123534\nHLA-A*11:01,RLCVQSTHVD,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,36665.7,38873.9,31502.68,16439.925064143878,20797.42164828521,18900.914768442428\nHLA-A*11:01,RTLEDLLM,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,25073.4,25150.0,28533.91,8568.430817036431,17210.176195650718,16426.73627097718\nHLA-A*11:01,RTLEDLLMGT,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,10614.7,11547.7,17957.28,5427.7381364339035,11073.460570718235,8382.788506029578\nHLA-A*11:01,SCCRSSRTR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,18883.7,24145.6,26662.08,15485.848069987858,21890.10996332307,20287.36552220845\nHLA-A*11:01,SEYRHYCYS,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,23398.2,20096.9,25882.7,7242.909501939216,11946.170085596685,7814.238764311442\nHLA-A*11:01,SEYRHYCYSLY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,17141.7,11630.6,20957.16,4964.468160017936,10681.487881770356,11395.416258121184\nHLA-A*11:01,SKISEYRHYCY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,7955.9,12019.7,3460.45,8211.261537746985,13027.182753105479,13348.301499116104\nHLA-A*11:01,SLYGTTLEQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,1994.1,2518.8,5669.4,909.8688747359337,2583.0824601605514,1509.6991300998684\nHLA-A*11:01,SSEEEDEIDG,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,46388.6,45398.5,42990.49,21625.90201824093,30902.77595538252,30192.822903541386\nHLA-A*11:01,STHVDIRTLE,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,8180.6,10690.3,17396.74,2351.1514694241355,3626.466390078379,3108.056300285067\nHLA-A*11:01,STLRLCVQS,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,7311.6,8398.6,11216.42,2592.2153659165165,4993.4326595202865,3624.604111059481\nHLA-A*11:01,STLRLCVQSTH,11,100.0,nb,,N
ON-CONFIDENTIAL,100000.0,11210.0,8508.4,21889.29,3677.2775077249353,6870.0783172152405,6505.549577252196\nHLA-A*11:01,TFCCKCDSTLR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,22933.5,20909.8,22830.75,2175.4325392765772,7119.048522834347,6740.3866863709045\nHLA-A*11:01,TGRCMSCCR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,11664.8,14196.3,18365.21,7303.337786462746,12676.74558320295,12030.401042609656\nHLA-A*11:01,TIHDIILEC,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,27744.8,24875.3,17265.29,14935.346863415803,15595.43261271422,13548.678252515494\nHLA-A*11:01,TIHDIILECVY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,3895.1,2672.8,4811.78,2174.707726321328,6132.897803971964,4242.112406686189\nHLA-A*11:01,TTDLYCYEQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,10077.9,11981.9,10163.91,2173.431967459284,5313.533654592557,3318.6427623216523\nHLA-A*11:01,TTDLYCYEQLN,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,35658.6,32336.7,29490.29,10811.590918534217,14704.748465398878,12049.104505257204\nHLA-A*11:01,TTIHDIILE,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,5708.2,6682.6,9740.68,2152.1711328398355,3144.269092022183,2404.548142607724\nHLA-A*11:01,TTIHDIILEC,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,20096.2,21278.2,15893.42,13869.607493932192,15474.210571434014,13922.373319334065\nHLA-A*11:01,TTLEQQYNKP,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1705.9,1893.4,709.82,14394.018049627624,10986.393844503771,8565.266976037352\nHLA-A*11:01,TTLEQQYNKPL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,16816.0,14604.8,7924.39,16751.873641029164,19942.592102160306,15317.980158638191\nHLA-A*11:01,VTFCCKCDS,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,15433.2,18465.4,20534.74,6336.414626546366,11300.937689146247,10910.325644877645\nHLA-A*11:01,VYCKQQLLR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,8470.8,9908.1,6611.43,4227.101931069413,8653.539142274069,6781.498224130493\nHLA-A*11:01,YSKISEYR,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,10650.0,13959.6,19078.77,5073.774072307504,9928.3530788146,7545.033627566686\nHLA-A*11:01,YSKISEYRH,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,23416.7,21120.8,19254.0,13898.256380507719,20758.7157315749,17581.880315582115\nHLA-A*11:01,YSLYGTTLEQ,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4900.2,5991.6,16898.07,5990.40340289689,8845.80405880307,7233.2704224236295\nHLA-A*24:02,RWTGRCMSCC,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,12646.1,17842.8,9956.13,2327.51469183589,4316.3125469468705,4864.483113368513\nHLA-A*24:02,IVYRDGNPYA,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,30398.7,29113.0,27617.81,14403.729281193133,19063.353259695068,18682.20111462103\nHLA-A*24:02,LLMGTLGIVC,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,41830.1,38467.6,31101.01,8535.885812364837,10566.25636941926,17589.22672256815\nHLA-A*24:02,CDSTLRLCV,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,37420.4,34181.2,35314.62,17019.067445824192,19265.427300303934,21144.969105576147\nHLA-A*24:02,RWTGRCMSC,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,8183.3,11769.2,4275.6,1252.129180436395,3292.725140390301,2210.2280345417207\nHLA-A*24:02,KKQRFHNIRG,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,40321.0,40204.7,39485.14,18274.05257410025,19301.011905667063,23056.192391966866\nHLA-A*24:02,DFAFRDLCIV,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,19689.7,20285.9,20730.97,7285.596677999693,11784.91100066803,11840.785810576412\nHLA-A*24:02,YCYSLYGTTL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,268.9,342.4,134.87,4045.001672952072,6291.939345660424,4901.339636410948\nHLA-A*24:02,CYEQLNDSSE,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,33221.5,33039.4,27771.52,10510.647969789583,12029.17312200454,19754.559503290977\nHLA-A*24:02,EYMLDLQPE,9,100.0,nb,,NON-CONFIDENTIAL,10000
0.0,24906.0,25762.3,13459.45,2627.735370486799,4674.805189252864,4188.678260213211\nHLA-A*24:02,LYCYEQLNDS,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,33078.8,30784.3,26307.61,7872.069624051399,9035.551260635362,21149.095121956765\nHLA-A*24:02,EYMLDLQPET,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,27655.5,25809.7,21242.99,4744.62409060371,7767.151151194414,9194.004919948033\nHLA-A*24:02,HYCYSLYGTT,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,13921.3,13727.3,9117.55,4160.420166078235,7680.8767122373865,6305.814446087444\nHLA-A*24:02,CYEQLNDSS,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,31067.7,31256.9,25011.36,7807.102866221178,8956.773474358984,12966.768480464609\nHLA-A*24:02,QQYNKPLCDL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,8431.5,11155.1,14362.04,14481.133400705226,18341.356262682217,22837.715700467557\nHLA-A*24:02,NPYAVCDKCL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,10467.5,15642.6,18381.11,19232.4395447782,22612.536963433042,29874.648854998188\nHLA-A*24:02,LYGTTLEQQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,28990.4,26818.0,14067.07,3472.181258108957,5467.901453685499,7824.765679419533\nHLA-A*24:02,LDKKQRFHNI,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,39331.7,33720.9,24254.75,10760.48301466704,13079.386571515302,20192.747930564517\nHLA-A*24:02,LRREVYDFAF,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,15282.3,13888.3,9032.62,3396.447163543621,4625.537091423434,6698.489881155511\nHLA-A*24:02,RCINCQKPL,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,37134.8,37655.9,23544.46,10265.641030587249,14067.48892259074,21094.348221815093\nHLA-A*24:02,TLEDLLMGTL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,30399.7,28756.1,28965.04,11968.230862651542,14649.88784954922,26051.10082437397\nHLA-A*24:02,FYSKISEYR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,9771.4,9577.7,7663.92,2517.1746850747663,4142.750766266233,4009.092735139147\nHLA-A*24:02,QLLRREVYDF,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,8433.7,9138.2,6034.56,1310.3032659857092,2974.8412189658825,5017.241639230572\nHLA-A*24:02,RHLDKKQRF,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,5098.1,9996.1,2766.13,624.1467030641506,1176.3738893945574,2251.6225660901514\nHLA-A*24:02,VDIRTLEDL,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,31430.5,32842.3,26183.23,14396.492717335888,17405.885082392062,21649.131058989424\nHLA-A*24:02,VDIRTLEDLL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,30708.1,31901.9,23643.51,16245.343259640471,19853.677601398125,25142.400644843307\nHLA-A*24:02,DPQERPRKL,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,45927.6,44371.8,40129.47,18731.668651537042,20882.49224815732,25848.272337163253\nHLA-A*24:02,LCVQSTHVDIR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,44114.8,42730.3,37951.25,23082.196613387925,25362.85581135644,34825.67527716447\nHLA-A*24:02,TFCCKCDSTLR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,26034.1,32333.2,29078.7,16550.591567148018,19665.39300949906,24965.192500271387\nHLA-A*24:02,EYMLDLQPETT,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,33132.1,29578.5,26569.63,2314.188613306457,4274.95201592503,6762.439605732876\nHLA-A*24:02,RTLEDLLMGTL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,23833.8,19335.6,26563.04,7132.363209150767,10084.0069546467,16049.337486401597\nHLA-A*24:02,CYSLYGTT,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,26920.4,27792.6,18030.1,2537.044719367281,3859.837211588529,12517.958554324397\nHLA-A*24:02,SEYRHYCYSLY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,10564.2,16609.5,9611.69,10111.367483128466,15044.378332556398,22937.54791454256\nHLA-A*24:02,QQLLRREVYDF,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,13218.7,13841.1,18489.62,2628.5987247809007,4844.5063343588,14465.954769608485\nHLA-A*24:02,REVYDFAFRDL,11,100.0,nb,,NON-CONFIDENTIAL,100000
.0,8446.6,6586.4,14573.68,11115.580407908974,14349.687826574016,11849.240258871943\nHLA-A*24:02,ECVYCKQQLLR,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,38119.6,37341.5,31379.52,25523.90440628299,26560.196552286587,35494.326036429564\nHLA-A*24:02,YDFAFRDLCIV,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,21789.1,26267.5,16421.16,18867.044391276177,22701.055813824718,30793.33695416457\nHLA-A*24:02,GNPYAVCDKCL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,26546.4,27740.6,34978.46,16938.861890850312,20381.577819995644,27403.483742458222\nHLA-A*24:02,CLKFYSKISEY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,24856.2,23225.4,37524.57,26914.97977266992,27765.476529918862,40336.140749917526\nHLA-A*24:02,ISEYRHYCYSL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,2833.5,2355.4,3113.45,8899.099085509657,11589.442806600184,19249.411082425595\nHLA-A*24:02,EVYDFAFRDLC,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,24585.8,25415.6,22725.03,20922.26895286488,23293.65328310457,32126.65772282363\nHLA-A*24:02,YYCYSVYGTTL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,216.6,202.8,604.7,111.89359456203823,301.8856522689136,427.51753151815245\nHLA-A*24:02,YYCYSVYGTT,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,7525.9,7137.3,1947.24,1525.68790783909,3382.4220501099353,3628.565531893391\nHLA-A*24:02,SEYRYYCYSV,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1054.0,2190.0,2876.35,7275.177471932455,11336.546020412681,15400.903755077848\nHLA-A*24:02,YYCYSVYGT,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,7889.0,7567.3,6728.99,2629.136425290338,4638.798012449303,6811.211221578079\nHLA-A*24:02,EYRYYCYSVY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,7795.8,7867.7,7892.64,991.8631191941313,1866.2659332119367,3302.622051967073\nHLA-A*24:02,VYGTTLEQQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,28208.5,24869.1,10684.19,3000.441701810754,4928.23580615368,6186.455574963915\nHLA-B*07:02,GPAGQAEPDRA,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,14048.1,16452.5,15440.19,8439.613951352983,7297.677323820672,19040.17544763588\nHLA-B*07:02,LPQLCTELQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,11909.2,16362.1,8416.83,5938.424953192735,4843.336291632932,6641.374936153506\nHLA-B*07:02,TPTLHEYMLDL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,3949.5,5038.2,2848.26,768.4217841785478,1302.9485673908416,2715.12986266422\nHLA-B*07:02,EPDRAHYNI,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,4371.2,5465.6,10667.68,7408.608322233509,7762.451996891709,12742.865461393372\nHLA-B*07:02,RCINCQKPL,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,5829.8,10020.6,10881.59,3948.634140996812,3630.282590768997,7602.4332938880725\nHLA-B*07:02,LPQLCTELQTT,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,11770.8,12514.7,11799.65,4419.9396490359295,3282.740576315066,8159.9436921048045\nHLA-B*07:02,YSLYGTTL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,15487.0,12208.5,23550.32,8041.795089062719,5743.020502832607,14662.043113555439\nHLA-B*07:02,GPAGQAEPD,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,18549.3,22758.7,10408.18,5409.346556132906,4792.710922930783,6776.763925503233\nHLA-B*07:02,CVYCKQQL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,24775.7,17574.2,29636.79,9462.35507254124,8425.700072279475,13477.150061657196\nHLA-B*07:02,NPYAVCDKCL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,2833.6,2857.8,3554.75,1510.460666972149,1788.5132709715024,3572.3129531873565\nHLA-B*07:02,SEYRHYCYSL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,16944.8,13971.8,23377.94,11601.694482651614,11169.815939819517,14290.891903899379\nHLA-B*07:02,QPETTDLYCY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,26188.3,26393.7,28506.75,18330.593632548607,14625.753154498847,15556.322880163614\nHLA-B*15:01,DLQPETTDLY,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,1985.9,2981.1
,1131.31,3901.182659640288,4483.832141070158,1783.8474540063705\nHLA-B*15:01,SEYRHYCYSL,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,4282.6,5063.9,4628.91,3965.6009998798,6776.729747724399,4161.584395180935\nHLA-B*15:01,RLCVQSTHV,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,3307.3,3976.7,1399.08,991.6851693379456,782.1412939991621,668.6607439441259\nHLA-B*15:01,LMGTLGIVC,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,10940.6,9196.3,2283.83,1163.2972888083507,2451.3715853407625,1714.1131992773676\nHLA-B*15:01,SLYGTTLEQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,7925.2,8401.9,4879.94,3679.8327331602363,4346.2989638983645,4685.072392552245\nHLA-B*15:01,LLIRCINCQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,15785.5,17114.1,7808.63,4894.483493790985,6755.436663280311,4899.414494827697\nHLA-B*15:01,LEQQYNKPL,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,10517.3,11395.1,3724.69,2763.669943063591,3815.3322109175483,1596.0868186309488\nHLA-B*15:01,QAEPDRAHY,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,5004.4,6088.1,3473.69,2522.379797450491,4143.677865503523,3351.3820256185163\nHLA-B*15:01,GTLGIVCPI,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,3304.0,3917.1,6258.59,4511.577434755824,6087.242606478117,4170.298962941786\nHLA-B*15:01,GIVCPICSQ,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,15376.8,18112.8,6329.0,6001.218223650592,7331.933504408098,6918.57362795794\nHLA-B*15:01,QLNDSSEEE,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,32349.3,31283.9,17375.87,13513.985673522251,15056.571198816013,20926.329416096352\nHLA-B*15:01,CMSCCRSSR,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,17864.8,16099.6,15117.01,6856.540480813101,7400.988988509392,10515.678390013729\nHLA-B*15:01,MSCCRSSRT,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,21090.0,24486.0,20596.6,9843.743232345192,12450.37128504561,16894.533840304917\nHLA-B*15:01,RSSRTRRETQ,10,100.0,nb,,NON-CONFIDENTIAL,100000.0,24184.0,26577.4,17711.83,5047.551635684806,7137.601056542866,3896.277724665923\nHLA-B*15:01,FAFRDLCIV,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,4061.7,4421.6,11152.53,4739.977927422689,6404.1837745263065,6853.42945161375\nHLA-B*15:01,LIRCINCQK,9,100.0,nb,,NON-CONFIDENTIAL,100000.0,20383.5,18954.9,15499.94,6841.424715671967,9989.585883938127,12415.662202950058\nHLA-B*15:01,LMGTLGIVCPI,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,3804.9,5684.1,5377.35,300.37054166544965,480.78461830160103,1357.4092613559171\nHLA-B*15:01,REVYDFAF,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,7893.8,8117.0,4471.38,319.6990871014404,329.72918502561424,730.5215018668864\nHLA-B*15:01,VQSTHVDIRTL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,7355.2,4917.9,11578.08,871.4412256872432,997.933215443048,2237.2053353204565\nHLA-B*15:01,LIRCINCQKPL,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,4240.5,6136.4,8293.61,1327.251814535686,2510.971864538838,5517.449886787486\nHLA-B*15:01,RLCVQSTH,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,15171.7,13984.7,17594.75,1230.1589376680622,1082.1936270282201,1195.1841288138844\nHLA-B*15:01,FYSKISEYRHY,11,100.0,nb,,NON-CONFIDENTIAL,100000.0,5921.2,6033.2,5410.21,6511.682318012961,7712.374659604434,10691.632383740738\nHLA-B*15:01,TLGIVCPI,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,20397.2,18270.7,18481.22,3542.624250266721,3264.328781981469,4857.917071540425\nHLA-B*15:01,RTRRETQL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,20971.0,19528.8,24587.69,3000.259755732219,5335.501010276689,5763.350117362581\nHLA-B*15:01,MSCCRSSR,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,27739.7,31263.0,33102.03,9088.570284361696,10481.258964209477,17024.63267992658\nHLA-B*15:01,CVYCKQQL,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,26148.1,21053.8,30864.02,3813.192639109835,7495.5400912868445,8313.23551831128
5\nHLA-B*15:01,KQRHLDKK,8,100.0,nb,,NON-CONFIDENTIAL,100000.0,29028.1,28410.4,29631.98,9377.071195923309,11672.093401923024,19848.68824116367\n"
  },
  {
    "path": "test/data/master_affinity_fixture_config.json",
    "content": "{\"_network\": null, \"fit_info\": [], \"hyperparameters\": {\"activation\": \"tanh\", \"allele_amino_acid_encoding\": \"BLOSUM62\", \"allele_dense_layer_sizes\": [], \"batch_normalization\": true, \"data_dependent_initialization_method\": null, \"dense_layer_l1_regularization\": 0.0, \"dense_layer_l2_regularization\": 0.0, \"dropout_probability\": 0.0, \"early_stopping\": true, \"init\": \"glorot_uniform\", \"layer_sizes\": [4], \"learning_rate\": null, \"locally_connected_layers\": [{\"activation\": \"tanh\", \"filters\": 2, \"kernel_size\": 2}], \"loss\": \"custom:mse_with_inequalities\", \"max_epochs\": 500, \"min_delta\": 0.0, \"minibatch_size\": 128, \"num_outputs\": 1, \"optimizer\": \"rmsprop\", \"output_activation\": \"sigmoid\", \"patience\": 20, \"peptide_allele_merge_activation\": \"\", \"peptide_allele_merge_method\": \"multiply\", \"peptide_dense_layer_sizes\": [], \"peptide_encoding\": {\"alignment_method\": \"pad_middle\", \"left_edge\": 4, \"max_length\": 15, \"right_edge\": 4, \"vector_encoding_name\": \"BLOSUM62\"}, \"random_negative_affinity_max\": 50000.0, \"random_negative_affinity_min\": 20000.0, \"random_negative_binder_threshold\": null, \"random_negative_constant\": 0, \"random_negative_distribution_smoothing\": 0.0, \"random_negative_lengths\": [8, 9, 10, 11, 12, 13, 14, 15], \"random_negative_match_distribution\": true, \"random_negative_method\": \"recommended\", \"random_negative_output_indices\": null, \"random_negative_rate\": 0.0, \"topology\": \"feedforward\", \"train_data\": {}, \"validation_split\": 0.1}, \"network_json\": \"{\\\"class_name\\\": \\\"Functional\\\", \\\"config\\\": {\\\"name\\\": \\\"predictor\\\", \\\"trainable\\\": true, \\\"layers\\\": [{\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"InputLayer\\\", \\\"config\\\": {\\\"batch_input_shape\\\": [null, 15, 21], \\\"dtype\\\": \\\"float32\\\", \\\"sparse\\\": false, \\\"ragged\\\": false, \\\"name\\\": \\\"peptide\\\"}, \\\"registered_name\\\": null, \\\"name\\\": \\\"peptide\\\", \\\"inbound_nodes\\\": []}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"LocallyConnected1D\\\", \\\"config\\\": {\\\"name\\\": \\\"lc_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"filters\\\": 2, \\\"kernel_size\\\": [2], \\\"strides\\\": [1], \\\"padding\\\": \\\"valid\\\", \\\"data_format\\\": \\\"channels_last\\\", \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null, \\\"implementation\\\": 1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 15, 21]}, \\\"name\\\": \\\"lc_0\\\", \\\"inbound_nodes\\\": [[[\\\"peptide\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Flatten\\\", \\\"config\\\": {\\\"name\\\": \\\"flattened_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"data_format\\\": \\\"channels_last\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 14, 2]}, \\\"name\\\": \\\"flattened_0\\\", 
\\\"inbound_nodes\\\": [[[\\\"lc_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_early\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 28]}, \\\"name\\\": \\\"batch_norm_early\\\", \\\"inbound_nodes\\\": [[[\\\"flattened_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 4, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 28]}, \\\"name\\\": \\\"dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_early\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 4]}, \\\"name\\\": \\\"batch_norm_0\\\", \\\"inbound_nodes\\\": [[[\\\"dense_0\\\", 0, 0, {}]]]}, 
{\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"output\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 1, \\\"activation\\\": \\\"sigmoid\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 4]}, \\\"name\\\": \\\"output\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_0\\\", 0, 0, {}]]]}], \\\"input_layers\\\": [[\\\"peptide\\\", 0, 0]], \\\"output_layers\\\": [[\\\"output\\\", 0, 0]]}, \\\"keras_version\\\": \\\"2.17.0\\\", \\\"backend\\\": \\\"tensorflow\\\"}\", \"network_weights\": null, \"network_weights_loader\": null, \"prediction_cache\": null}"
  },
  {
    "path": "test/data/master_affinity_fixture_predictions.json",
    "content": "{\"peptides\": [\"AAAAAAAAA\", \"CCCCCCCCC\", \"ACDEFGHIK\"], \"predictions\": [223.6097539934542, 223.6097539934542, 223.6097539934542]}"
  },
  {
    "path": "test/data/master_densenet_fixture_config.json",
    "content": "{\"hyperparameters\": {\"peptide_dense_layer_sizes\": [3], \"layer_sizes\": [4, 3, 2], \"activation\": \"tanh\", \"output_activation\": \"sigmoid\", \"dropout_probability\": 0.7, \"batch_normalization\": true, \"locally_connected_layers\": [], \"topology\": \"with-skip-connections\", \"num_outputs\": 1, \"allele_amino_acid_encoding\": \"BLOSUM62\", \"allele_dense_layer_sizes\": [], \"peptide_encoding\": {\"vector_encoding_name\": \"BLOSUM62\", \"alignment_method\": \"pad_middle\", \"left_edge\": 4, \"right_edge\": 4, \"max_length\": 15}, \"peptide_allele_merge_method\": \"multiply\", \"peptide_allele_merge_activation\": \"\", \"dense_layer_l1_regularization\": 0.001, \"dense_layer_l2_regularization\": 0.0, \"init\": \"glorot_uniform\", \"loss\": \"custom:mse_with_inequalities\", \"optimizer\": \"rmsprop\", \"learning_rate\": null, \"max_epochs\": 500, \"validation_split\": 0.1, \"early_stopping\": true, \"minibatch_size\": 128, \"data_dependent_initialization_method\": null, \"random_negative_affinity_min\": 20000.0, \"random_negative_affinity_max\": 50000.0, \"random_negative_output_indices\": null, \"random_negative_rate\": 0.0, \"random_negative_constant\": 0, \"random_negative_match_distribution\": true, \"random_negative_distribution_smoothing\": 0.0, \"random_negative_method\": \"recommended\", \"random_negative_binder_threshold\": null, \"random_negative_lengths\": [8, 9, 10, 11, 12, 13, 14, 15], \"patience\": 20, \"min_delta\": 0.0, \"train_data\": {}}, \"_network\": null, \"network_json\": \"{\\\"class_name\\\": \\\"Functional\\\", \\\"config\\\": {\\\"name\\\": \\\"predictor\\\", \\\"trainable\\\": true, \\\"layers\\\": [{\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"InputLayer\\\", \\\"config\\\": {\\\"batch_input_shape\\\": [null, 15, 21], \\\"dtype\\\": \\\"float32\\\", \\\"sparse\\\": false, \\\"ragged\\\": false, \\\"name\\\": \\\"peptide\\\"}, \\\"registered_name\\\": null, \\\"name\\\": \\\"peptide\\\", \\\"inbound_nodes\\\": []}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Flatten\\\", \\\"config\\\": {\\\"name\\\": \\\"flattened_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"data_format\\\": \\\"channels_last\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 15, 21]}, \\\"name\\\": \\\"flattened_0\\\", \\\"inbound_nodes\\\": [[[\\\"peptide\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"peptide_dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 3, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 315]}, \\\"name\\\": \\\"peptide_dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"flattened_0\\\", 0, 
0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_early\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 3]}, \\\"name\\\": \\\"batch_norm_early\\\", \\\"inbound_nodes\\\": [[[\\\"peptide_dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 4, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 3]}, \\\"name\\\": \\\"dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_early\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": 
{\\\"input_shape\\\": [null, 4]}, \\\"name\\\": \\\"batch_norm_0\\\", \\\"inbound_nodes\\\": [[[\\\"dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dropout\\\", \\\"config\\\": {\\\"name\\\": \\\"dropout_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"rate\\\": 0.30000000000000004, \\\"noise_shape\\\": null, \\\"seed\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 4]}, \\\"name\\\": \\\"dropout_0\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Concatenate\\\", \\\"config\\\": {\\\"name\\\": \\\"concatenate\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": -1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [[null, 3], [null, 4]]}, \\\"name\\\": \\\"concatenate\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_early\\\", 0, 0, {}], [\\\"dropout_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"dense_1\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 3, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 7]}, \\\"name\\\": \\\"dense_1\\\", \\\"inbound_nodes\\\": [[[\\\"concatenate\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_1\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 3]}, \\\"name\\\": \\\"batch_norm_1\\\", \\\"inbound_nodes\\\": [[[\\\"dense_1\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dropout\\\", \\\"config\\\": {\\\"name\\\": \\\"dropout_1\\\", \\\"trainable\\\": true, 
\\\"dtype\\\": \\\"float32\\\", \\\"rate\\\": 0.30000000000000004, \\\"noise_shape\\\": null, \\\"seed\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 3]}, \\\"name\\\": \\\"dropout_1\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_1\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Concatenate\\\", \\\"config\\\": {\\\"name\\\": \\\"concatenate_1\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": -1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [[null, 4], [null, 3]]}, \\\"name\\\": \\\"concatenate_1\\\", \\\"inbound_nodes\\\": [[[\\\"dropout_0\\\", 0, 0, {}], [\\\"dropout_1\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"dense_2\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 2, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 7]}, \\\"name\\\": \\\"dense_2\\\", \\\"inbound_nodes\\\": [[[\\\"concatenate_1\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_2\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 2]}, \\\"name\\\": \\\"batch_norm_2\\\", \\\"inbound_nodes\\\": [[[\\\"dense_2\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dropout\\\", \\\"config\\\": {\\\"name\\\": \\\"dropout_2\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"rate\\\": 0.30000000000000004, \\\"noise_shape\\\": null, \\\"seed\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 2]}, \\\"name\\\": \\\"dropout_2\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_2\\\", 
0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"output\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 1, \\\"activation\\\": \\\"sigmoid\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 2]}, \\\"name\\\": \\\"output\\\", \\\"inbound_nodes\\\": [[[\\\"dropout_2\\\", 0, 0, {}]]]}], \\\"input_layers\\\": [[\\\"peptide\\\", 0, 0]], \\\"output_layers\\\": [[\\\"output\\\", 0, 0]]}, \\\"keras_version\\\": \\\"2.17.0\\\", \\\"backend\\\": \\\"tensorflow\\\"}\", \"network_weights\": null, \"network_weights_loader\": null, \"fit_info\": [], \"prediction_cache\": null}"
  },
  {
    "path": "test/data/master_densenet_fixture_predictions.json",
    "content": "{\"peptides\": [\"SYFPEITHI\", \"AAAAAAAAA\", \"CCCCCCCCC\", \"DDDDDDDDD\"], \"predictions\": [3600.1894226888244, 3600.1894226888244, 3600.1894226888244, 3600.1894226888244]}"
  },
  {
    "path": "test/data/master_multi_output_fixture_config.json",
    "content": "{\"hyperparameters\": {\"peptide_dense_layer_sizes\": [], \"layer_sizes\": [4], \"activation\": \"tanh\", \"output_activation\": \"sigmoid\", \"dropout_probability\": 0.0, \"batch_normalization\": false, \"locally_connected_layers\": [], \"topology\": \"feedforward\", \"num_outputs\": 3, \"allele_amino_acid_encoding\": \"BLOSUM62\", \"allele_dense_layer_sizes\": [], \"peptide_encoding\": {\"vector_encoding_name\": \"BLOSUM62\", \"alignment_method\": \"pad_middle\", \"left_edge\": 4, \"right_edge\": 4, \"max_length\": 15}, \"peptide_allele_merge_method\": \"multiply\", \"peptide_allele_merge_activation\": \"\", \"dense_layer_l1_regularization\": 0.001, \"dense_layer_l2_regularization\": 0.0, \"init\": \"glorot_uniform\", \"loss\": \"custom:mse_with_inequalities\", \"optimizer\": \"rmsprop\", \"learning_rate\": null, \"max_epochs\": 500, \"validation_split\": 0.1, \"early_stopping\": true, \"minibatch_size\": 128, \"data_dependent_initialization_method\": null, \"random_negative_affinity_min\": 20000.0, \"random_negative_affinity_max\": 50000.0, \"random_negative_output_indices\": null, \"random_negative_rate\": 0.0, \"random_negative_constant\": 0, \"random_negative_match_distribution\": true, \"random_negative_distribution_smoothing\": 0.0, \"random_negative_method\": \"recommended\", \"random_negative_binder_threshold\": null, \"random_negative_lengths\": [8, 9, 10, 11, 12, 13, 14, 15], \"patience\": 20, \"min_delta\": 0.0, \"train_data\": {}}, \"_network\": null, \"network_json\": \"{\\\"class_name\\\": \\\"Functional\\\", \\\"config\\\": {\\\"name\\\": \\\"predictor\\\", \\\"trainable\\\": true, \\\"layers\\\": [{\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"InputLayer\\\", \\\"config\\\": {\\\"batch_input_shape\\\": [null, 15, 21], \\\"dtype\\\": \\\"float32\\\", \\\"sparse\\\": false, \\\"ragged\\\": false, \\\"name\\\": \\\"peptide\\\"}, \\\"registered_name\\\": null, \\\"name\\\": \\\"peptide\\\", \\\"inbound_nodes\\\": []}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Flatten\\\", \\\"config\\\": {\\\"name\\\": \\\"flattened_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"data_format\\\": \\\"channels_last\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 15, 21]}, \\\"name\\\": \\\"flattened_0\\\", \\\"inbound_nodes\\\": [[[\\\"peptide\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 4, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 315]}, \\\"name\\\": \\\"dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"flattened_0\\\", 0, 0, {}]]]}, {\\\"module\\\": 
\\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"output\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 3, \\\"activation\\\": \\\"sigmoid\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 4]}, \\\"name\\\": \\\"output\\\", \\\"inbound_nodes\\\": [[[\\\"dense_0\\\", 0, 0, {}]]]}], \\\"input_layers\\\": [[\\\"peptide\\\", 0, 0]], \\\"output_layers\\\": [[\\\"output\\\", 0, 0]]}, \\\"keras_version\\\": \\\"2.17.0\\\", \\\"backend\\\": \\\"tensorflow\\\"}\", \"network_weights\": null, \"network_weights_loader\": null, \"fit_info\": [], \"prediction_cache\": null}"
  },
  {
    "path": "test/data/master_multi_output_fixture_predictions.json",
    "content": "{\"peptides\": [\"SYFPEITHI\", \"AAAAAAAAA\", \"CCCCCCCCC\", \"DDDDDDDDD\"], \"predictions\": [39441.12426571068, 39441.12426571068, 39441.12426571068, 39441.12426571068]}"
  },
  {
    "path": "test/data/master_pan_concat_fixture_config.json",
    "content": "{\"hyperparameters\": {\"allele_amino_acid_encoding\": \"BLOSUM62\", \"allele_dense_layer_sizes\": [3], \"peptide_dense_layer_sizes\": [3], \"peptide_allele_merge_method\": \"concatenate\", \"peptide_allele_merge_activation\": \"\", \"layer_sizes\": [5], \"activation\": \"tanh\", \"output_activation\": \"sigmoid\", \"dropout_probability\": 0.8, \"batch_normalization\": true, \"locally_connected_layers\": [{\"filters\": 2, \"activation\": \"tanh\", \"kernel_size\": 2}], \"topology\": \"feedforward\", \"num_outputs\": 1, \"peptide_encoding\": {\"vector_encoding_name\": \"BLOSUM62\", \"alignment_method\": \"pad_middle\", \"left_edge\": 4, \"right_edge\": 4, \"max_length\": 15}, \"dense_layer_l1_regularization\": 0.001, \"dense_layer_l2_regularization\": 0.0, \"init\": \"glorot_uniform\", \"loss\": \"custom:mse_with_inequalities\", \"optimizer\": \"rmsprop\", \"learning_rate\": null, \"max_epochs\": 500, \"validation_split\": 0.1, \"early_stopping\": true, \"minibatch_size\": 128, \"data_dependent_initialization_method\": null, \"random_negative_affinity_min\": 20000.0, \"random_negative_affinity_max\": 50000.0, \"random_negative_output_indices\": null, \"random_negative_rate\": 0.0, \"random_negative_constant\": 0, \"random_negative_match_distribution\": true, \"random_negative_distribution_smoothing\": 0.0, \"random_negative_method\": \"recommended\", \"random_negative_binder_threshold\": null, \"random_negative_lengths\": [8, 9, 10, 11, 12, 13, 14, 15], \"patience\": 20, \"min_delta\": 0.0, \"train_data\": {}}, \"_network\": null, \"network_json\": \"{\\\"class_name\\\": \\\"Functional\\\", \\\"config\\\": {\\\"name\\\": \\\"predictor\\\", \\\"trainable\\\": true, \\\"layers\\\": [{\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"InputLayer\\\", \\\"config\\\": {\\\"batch_input_shape\\\": [null, 15, 21], \\\"dtype\\\": \\\"float32\\\", \\\"sparse\\\": false, \\\"ragged\\\": false, \\\"name\\\": \\\"peptide\\\"}, \\\"registered_name\\\": null, \\\"name\\\": \\\"peptide\\\", \\\"inbound_nodes\\\": []}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"LocallyConnected1D\\\", \\\"config\\\": {\\\"name\\\": \\\"lc_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"filters\\\": 2, \\\"kernel_size\\\": [2], \\\"strides\\\": [1], \\\"padding\\\": \\\"valid\\\", \\\"data_format\\\": \\\"channels_last\\\", \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null, \\\"implementation\\\": 1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 15, 21]}, \\\"name\\\": \\\"lc_0\\\", \\\"inbound_nodes\\\": [[[\\\"peptide\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"InputLayer\\\", \\\"config\\\": {\\\"batch_input_shape\\\": [null, 1], \\\"dtype\\\": \\\"float32\\\", \\\"sparse\\\": false, \\\"ragged\\\": false, \\\"name\\\": \\\"allele\\\"}, \\\"registered_name\\\": null, \\\"name\\\": \\\"allele\\\", \\\"inbound_nodes\\\": []}, {\\\"module\\\": \\\"keras.layers\\\", 
\\\"class_name\\\": \\\"Flatten\\\", \\\"config\\\": {\\\"name\\\": \\\"flattened_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"data_format\\\": \\\"channels_last\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 14, 2]}, \\\"name\\\": \\\"flattened_0\\\", \\\"inbound_nodes\\\": [[[\\\"lc_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Embedding\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_representation\\\", \\\"trainable\\\": false, \\\"dtype\\\": \\\"float32\\\", \\\"batch_input_shape\\\": [null, 1], \\\"input_dim\\\": 3, \\\"output_dim\\\": 777, \\\"embeddings_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"RandomUniform\\\", \\\"config\\\": {\\\"minval\\\": -0.05, \\\"maxval\\\": 0.05, \\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"embeddings_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"embeddings_constraint\\\": null, \\\"mask_zero\\\": false, \\\"input_length\\\": 1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 1]}, \\\"name\\\": \\\"allele_representation\\\", \\\"inbound_nodes\\\": [[[\\\"allele\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"peptide_dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 3, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 28]}, \\\"name\\\": \\\"peptide_dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"flattened_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 3, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 1, 777]}, \\\"name\\\": \\\"allele_dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"allele_representation\\\", 0, 0, {}]]]}, {\\\"module\\\": 
\\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_early\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 3]}, \\\"name\\\": \\\"batch_norm_early\\\", \\\"inbound_nodes\\\": [[[\\\"peptide_dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Flatten\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_flat\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"data_format\\\": \\\"channels_last\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 1, 3]}, \\\"name\\\": \\\"allele_flat\\\", \\\"inbound_nodes\\\": [[[\\\"allele_dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Concatenate\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_peptide_merged\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": -1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [[null, 3], [null, 3]]}, \\\"name\\\": \\\"allele_peptide_merged\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_early\\\", 0, 0, {}], [\\\"allele_flat\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 5, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 6]}, \\\"name\\\": \\\"dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"allele_peptide_merged\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": 
true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 5]}, \\\"name\\\": \\\"batch_norm_0\\\", \\\"inbound_nodes\\\": [[[\\\"dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dropout\\\", \\\"config\\\": {\\\"name\\\": \\\"dropout_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"rate\\\": 0.19999999999999996, \\\"noise_shape\\\": null, \\\"seed\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 5]}, \\\"name\\\": \\\"dropout_0\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"output\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 1, \\\"activation\\\": \\\"sigmoid\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 5]}, \\\"name\\\": \\\"output\\\", \\\"inbound_nodes\\\": [[[\\\"dropout_0\\\", 0, 0, {}]]]}], \\\"input_layers\\\": [[\\\"peptide\\\", 0, 0], [\\\"allele\\\", 0, 0]], \\\"output_layers\\\": [[\\\"output\\\", 0, 0]]}, \\\"keras_version\\\": \\\"2.17.0\\\", \\\"backend\\\": \\\"tensorflow\\\"}\", \"network_weights\": null, \"network_weights_loader\": null, \"fit_info\": [], \"prediction_cache\": null}"
  },
  {
    "path": "test/data/master_pan_concat_fixture_predictions.json",
    "content": "{\"peptides\": [\"SYFPEITHI\", \"AAAAAAAAA\", \"CCCCCCCCC\", \"DDDDDDDDD\"], \"predictions\": [50000.0, 50000.0, 50000.0, 50000.0], \"alleles\": [\"HLA-A*01:01\", \"HLA-B*07:02\", \"HLA-A*01:01\", \"HLA-B*07:02\"], \"allele_to_sequence\": {\"HLA-A*01:01\": \"ACDEFGHIKLMNPQRSTVWYACDEFGHIKLMNPQRST\", \"HLA-B*07:02\": \"YWVTSRQPNMLKIHGFEDCAYWVTSRQPNMLKIHGFE\"}}"
  },
  {
    "path": "test/data/master_pan_multiply_fixture_config.json",
    "content": "{\"hyperparameters\": {\"allele_amino_acid_encoding\": \"BLOSUM62\", \"allele_dense_layer_sizes\": [3], \"peptide_dense_layer_sizes\": [3], \"peptide_allele_merge_method\": \"multiply\", \"peptide_allele_merge_activation\": \"\", \"layer_sizes\": [5], \"activation\": \"tanh\", \"output_activation\": \"sigmoid\", \"dropout_probability\": 0.8, \"batch_normalization\": true, \"locally_connected_layers\": [{\"filters\": 2, \"activation\": \"tanh\", \"kernel_size\": 2}], \"topology\": \"feedforward\", \"num_outputs\": 1, \"peptide_encoding\": {\"vector_encoding_name\": \"BLOSUM62\", \"alignment_method\": \"pad_middle\", \"left_edge\": 4, \"right_edge\": 4, \"max_length\": 15}, \"dense_layer_l1_regularization\": 0.001, \"dense_layer_l2_regularization\": 0.0, \"init\": \"glorot_uniform\", \"loss\": \"custom:mse_with_inequalities\", \"optimizer\": \"rmsprop\", \"learning_rate\": null, \"max_epochs\": 500, \"validation_split\": 0.1, \"early_stopping\": true, \"minibatch_size\": 128, \"data_dependent_initialization_method\": null, \"random_negative_affinity_min\": 20000.0, \"random_negative_affinity_max\": 50000.0, \"random_negative_output_indices\": null, \"random_negative_rate\": 0.0, \"random_negative_constant\": 0, \"random_negative_match_distribution\": true, \"random_negative_distribution_smoothing\": 0.0, \"random_negative_method\": \"recommended\", \"random_negative_binder_threshold\": null, \"random_negative_lengths\": [8, 9, 10, 11, 12, 13, 14, 15], \"patience\": 20, \"min_delta\": 0.0, \"train_data\": {}}, \"_network\": null, \"network_json\": \"{\\\"class_name\\\": \\\"Functional\\\", \\\"config\\\": {\\\"name\\\": \\\"predictor\\\", \\\"trainable\\\": true, \\\"layers\\\": [{\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"InputLayer\\\", \\\"config\\\": {\\\"batch_input_shape\\\": [null, 15, 21], \\\"dtype\\\": \\\"float32\\\", \\\"sparse\\\": false, \\\"ragged\\\": false, \\\"name\\\": \\\"peptide\\\"}, \\\"registered_name\\\": null, \\\"name\\\": \\\"peptide\\\", \\\"inbound_nodes\\\": []}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"LocallyConnected1D\\\", \\\"config\\\": {\\\"name\\\": \\\"lc_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"filters\\\": 2, \\\"kernel_size\\\": [2], \\\"strides\\\": [1], \\\"padding\\\": \\\"valid\\\", \\\"data_format\\\": \\\"channels_last\\\", \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null, \\\"implementation\\\": 1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 15, 21]}, \\\"name\\\": \\\"lc_0\\\", \\\"inbound_nodes\\\": [[[\\\"peptide\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"InputLayer\\\", \\\"config\\\": {\\\"batch_input_shape\\\": [null, 1], \\\"dtype\\\": \\\"float32\\\", \\\"sparse\\\": false, \\\"ragged\\\": false, \\\"name\\\": \\\"allele\\\"}, \\\"registered_name\\\": null, \\\"name\\\": \\\"allele\\\", \\\"inbound_nodes\\\": []}, {\\\"module\\\": \\\"keras.layers\\\", 
\\\"class_name\\\": \\\"Flatten\\\", \\\"config\\\": {\\\"name\\\": \\\"flattened_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"data_format\\\": \\\"channels_last\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 14, 2]}, \\\"name\\\": \\\"flattened_0\\\", \\\"inbound_nodes\\\": [[[\\\"lc_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Embedding\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_representation\\\", \\\"trainable\\\": false, \\\"dtype\\\": \\\"float32\\\", \\\"batch_input_shape\\\": [null, 1], \\\"input_dim\\\": 3, \\\"output_dim\\\": 777, \\\"embeddings_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"RandomUniform\\\", \\\"config\\\": {\\\"minval\\\": -0.05, \\\"maxval\\\": 0.05, \\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"embeddings_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"embeddings_constraint\\\": null, \\\"mask_zero\\\": false, \\\"input_length\\\": 1}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 1]}, \\\"name\\\": \\\"allele_representation\\\", \\\"inbound_nodes\\\": [[[\\\"allele\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"peptide_dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 3, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 28]}, \\\"name\\\": \\\"peptide_dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"flattened_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 3, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 1, 777]}, \\\"name\\\": \\\"allele_dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"allele_representation\\\", 0, 0, {}]]]}, {\\\"module\\\": 
\\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_early\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 3]}, \\\"name\\\": \\\"batch_norm_early\\\", \\\"inbound_nodes\\\": [[[\\\"peptide_dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Flatten\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_flat\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"data_format\\\": \\\"channels_last\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 1, 3]}, \\\"name\\\": \\\"allele_flat\\\", \\\"inbound_nodes\\\": [[[\\\"allele_dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Multiply\\\", \\\"config\\\": {\\\"name\\\": \\\"allele_peptide_merged\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\"}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [[null, 3], [null, 3]]}, \\\"name\\\": \\\"allele_peptide_merged\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_early\\\", 0, 0, {}], [\\\"allele_flat\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"dense_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 5, \\\"activation\\\": \\\"tanh\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": {\\\"module\\\": \\\"keras.regularizers\\\", \\\"class_name\\\": \\\"L1L2\\\", \\\"config\\\": {\\\"l1\\\": 0.0010000000474974513, \\\"l2\\\": 0.0}, \\\"registered_name\\\": null}, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 3]}, \\\"name\\\": \\\"dense_0\\\", \\\"inbound_nodes\\\": [[[\\\"allele_peptide_merged\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"BatchNormalization\\\", \\\"config\\\": {\\\"name\\\": \\\"batch_norm_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"axis\\\": [1], \\\"momentum\\\": 0.99, \\\"epsilon\\\": 0.001, \\\"center\\\": true, \\\"scale\\\": 
true, \\\"beta_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"gamma_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_mean_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"moving_variance_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Ones\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"beta_regularizer\\\": null, \\\"gamma_regularizer\\\": null, \\\"beta_constraint\\\": null, \\\"gamma_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 5]}, \\\"name\\\": \\\"batch_norm_0\\\", \\\"inbound_nodes\\\": [[[\\\"dense_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dropout\\\", \\\"config\\\": {\\\"name\\\": \\\"dropout_0\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"rate\\\": 0.19999999999999996, \\\"noise_shape\\\": null, \\\"seed\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 5]}, \\\"name\\\": \\\"dropout_0\\\", \\\"inbound_nodes\\\": [[[\\\"batch_norm_0\\\", 0, 0, {}]]]}, {\\\"module\\\": \\\"keras.layers\\\", \\\"class_name\\\": \\\"Dense\\\", \\\"config\\\": {\\\"name\\\": \\\"output\\\", \\\"trainable\\\": true, \\\"dtype\\\": \\\"float32\\\", \\\"units\\\": 1, \\\"activation\\\": \\\"sigmoid\\\", \\\"use_bias\\\": true, \\\"kernel_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"GlorotUniform\\\", \\\"config\\\": {\\\"seed\\\": null}, \\\"registered_name\\\": null}, \\\"bias_initializer\\\": {\\\"module\\\": \\\"keras.initializers\\\", \\\"class_name\\\": \\\"Zeros\\\", \\\"config\\\": {}, \\\"registered_name\\\": null}, \\\"kernel_regularizer\\\": null, \\\"bias_regularizer\\\": null, \\\"activity_regularizer\\\": null, \\\"kernel_constraint\\\": null, \\\"bias_constraint\\\": null}, \\\"registered_name\\\": null, \\\"build_config\\\": {\\\"input_shape\\\": [null, 5]}, \\\"name\\\": \\\"output\\\", \\\"inbound_nodes\\\": [[[\\\"dropout_0\\\", 0, 0, {}]]]}], \\\"input_layers\\\": [[\\\"peptide\\\", 0, 0], [\\\"allele\\\", 0, 0]], \\\"output_layers\\\": [[\\\"output\\\", 0, 0]]}, \\\"keras_version\\\": \\\"2.17.0\\\", \\\"backend\\\": \\\"tensorflow\\\"}\", \"network_weights\": null, \"network_weights_loader\": null, \"fit_info\": [], \"prediction_cache\": null}"
  },
  {
    "path": "test/data/master_pan_multiply_fixture_predictions.json",
    "content": "{\"peptides\": [\"SYFPEITHI\", \"AAAAAAAAA\", \"CCCCCCCCC\", \"DDDDDDDDD\"], \"predictions\": [50000.0, 50000.0, 50000.0, 50000.0], \"alleles\": [\"HLA-A*01:01\", \"HLA-B*07:02\", \"HLA-A*01:01\", \"HLA-B*07:02\"], \"allele_to_sequence\": {\"HLA-A*01:01\": \"ACDEFGHIKLMNPQRSTVWYACDEFGHIKLMNPQRST\", \"HLA-B*07:02\": \"YWVTSRQPNMLKIHGFEDCAYWVTSRQPNMLKIHGFE\"}}"
  },
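  {
    "path": "test/sketch_allele_encoding_fixture.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the test suite: the allele_to_sequence\nmapping stored in master_pan_multiply_fixture_predictions.json is the same\nstructure consumed by AlleleEncoding (compare test_allele_encoding.py). The\nsequences below are copied from that fixture; the rest is illustration only.\n\"\"\"\n\nfrom mhcflurry.allele_encoding import AlleleEncoding\n\nALLELE_TO_SEQUENCE = {\n    \"HLA-A*01:01\": \"ACDEFGHIKLMNPQRSTVWYACDEFGHIKLMNPQRST\",\n    \"HLA-B*07:02\": \"YWVTSRQPNMLKIHGFEDCAYWVTSRQPNMLKIHGFE\",\n}\n\nif __name__ == \"__main__\":\n    encoding = AlleleEncoding(\n        [\"HLA-A*01:01\", \"HLA-B*07:02\", \"HLA-A*01:01\"],\n        ALLELE_TO_SEQUENCE)\n    # One matrix of BLOSUM62 vectors per allele instance, in input order.\n    vectors = encoding.fixed_length_vector_encoded_sequences(\"BLOSUM62\")\n    print(len(vectors))\n"
  },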
  {
    "path": "test/data/master_released_class1_affinity_predictions.json",
    "content": "{\"release\": \"2.2.0\", \"allele_specific\": {\"alleles\": [\"HLA-A*01:01\", \"HLA-B*07:01\", \"HLA-C*03:03\", \"HLA-A*01:01\", \"HLA-B*07:01\", \"HLA-C*03:03\"], \"peptides\": [\"SYFPEITHI\", \"NLVPMVATV\", \"LLFGYPVYV\", \"GILGFVFTL\", \"SLLMWITQC\", \"FLPSDFFPS\"], \"predictions\": [24452.379008653887, 7735.019327156681, 2587.3099121536384, 24210.461714088437, 10486.544291944927, 14799.753594255337]}, \"pan_allele\": {\"alleles\": [\"HLA-A*01:01\", \"HLA-B*07:01\", \"HLA-C*01:02\", \"HLA-A*01:01\", \"HLA-B*07:01\", \"HLA-C*01:02\"], \"peptides\": [\"SYFPEITHI\", \"NLVPMVATV\", \"LLFGYPVYV\", \"GILGFVFTL\", \"SLLMWITQC\", \"FLPSDFFPS\"], \"predictions\": [26048.673469654335, 9361.822193351605, 935.231481331825, 27538.157456313915, 24523.76119046196, 8001.970439100902]}}"
  },
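  {
    "path": "test/sketch_golden_predictions_check.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the test suite: one way a golden-predictions\nfixture like master_released_class1_affinity_predictions.json could be\nreplayed against a downloaded predictor. The fixture path argument and the\nlog-space tolerance are assumptions for illustration.\n\"\"\"\n\nimport json\n\nimport numpy\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.downloads import get_path\n\n\ndef check_pan_allele_block(fixture_path):\n    with open(fixture_path) as fd:\n        fixture = json.load(fd)\n    block = fixture[\"pan_allele\"]\n    predictor = Class1AffinityPredictor.load(\n        get_path(\"models_class1_pan\", \"models.combined\"))\n    predictions = predictor.predict(\n        peptides=block[\"peptides\"], alleles=block[\"alleles\"])\n    # Compare in log space since affinities span orders of magnitude.\n    numpy.testing.assert_allclose(\n        numpy.log(predictions), numpy.log(block[\"predictions\"]), rtol=0.2)\n"
  },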
  {
    "path": "test/data/master_released_class1_presentation_highscore_rows_metadata.json",
    "content": "{\n  \"allele_count\": 35,\n  \"context_count\": 9,\n  \"high_score_threshold\": 0.9,\n  \"low_score_stats\": {\n    \"pres_with_presentation_score\": {\n      \"context_count\": 9,\n      \"contexts_with_low_allele\": 9,\n      \"low_score_threshold\": 0.2\n    },\n    \"pres_without_presentation_score\": {\n      \"context_count\": 9,\n      \"contexts_with_low_allele\": 9,\n      \"low_score_threshold\": 0.2\n    }\n  },\n  \"low_score_threshold\": 0.2,\n  \"presentation_internal_affinity_provenance\": \"generated on Thu Jun 11 13:31:45 2020\",\n  \"presentation_provenance\": \"generated on Thu Jun 11 13:37:18 2020\",\n  \"release\": \"2.2.0\",\n  \"row_count\": 315,\n  \"score_columns\": [\n    \"pres_with_presentation_score\",\n    \"pres_without_presentation_score\"\n  ],\n  \"source_tf_predictions_csv\": \"/private/tmp/mhcflurry-cross-allele-1000-randflanks/tf_predictions.csv.gz\"\n}"
  },
  {
    "path": "test/expensive_verify_pretrain_optimizable.py",
    "content": "# Expensive test - not run by pytest.\n\nfrom mhcflurry import train_pan_allele_models_command\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.allele_encoding import AlleleEncoding\n\nimport pandas\nimport numpy\n\nPRETRAIN_DATA_PATH = get_path(\n    \"random_peptide_predictions\", \"predictions.csv.bz2\")\n\nFULL_TRAIN_DF = pandas.read_csv(\n        get_path(\n            \"data_curated\",\n            \"curated_training_data.no_mass_spec.csv.bz2\"))\nTRAIN_DF = FULL_TRAIN_DF.loc[\n    (FULL_TRAIN_DF.peptide.str.len() >= 8) &\n    (FULL_TRAIN_DF.peptide.str.len() <= 15)\n]\nALLELE_SEQUENCES = pandas.read_csv(\n    get_path(\"allele_sequences\", \"allele_sequences.csv\"),\n    index_col=0).sequence\nALLELE_SEQUENCES = ALLELE_SEQUENCES.loc[\n    ALLELE_SEQUENCES.index.isin(TRAIN_DF.allele)\n]\nTRAIN_DF = TRAIN_DF.loc[\n    TRAIN_DF.allele.isin(ALLELE_SEQUENCES.index)\n]\nFOLDS_DF = pandas.DataFrame(index=TRAIN_DF.index)\nFOLDS_DF[\"fold_0\"] = True\n\nHYPERPARAMTERS = {\n    'activation': 'tanh', 'allele_dense_layer_sizes': [],\n    'batch_normalization': False,\n    'dense_layer_l1_regularization': 0.0,\n    'dense_layer_l2_regularization': 0.0, 'dropout_probability': 0.5,\n    'early_stopping': True, 'init': 'glorot_uniform',\n    'layer_sizes': [1024, 512], 'learning_rate': None,\n    'locally_connected_layers': [], 'loss': 'custom:mse_with_inequalities',\n    'max_epochs': 0, 'min_delta': 0.0, 'minibatch_size': 128,\n    'optimizer': 'rmsprop', 'output_activation': 'sigmoid', 'patience': 20,\n    'peptide_allele_merge_activation': '',\n    'peptide_allele_merge_method': 'concatenate',\n    'peptide_amino_acid_encoding': 'BLOSUM62', 'peptide_dense_layer_sizes': [],\n    'peptide_encoding': {'alignment_method': 'left_pad_centered_right_pad',\n                         'max_length': 15, 'vector_encoding_name': 'BLOSUM62'},\n    'random_negative_affinity_max': 50000.0,\n    'random_negative_affinity_min': 20000.0, 'random_negative_constant': 25,\n    'random_negative_distribution_smoothing': 0.0,\n    'random_negative_match_distribution': True, 'random_negative_rate': 0.2,\n    'train_data': {'pretrain': True,\n                   'pretrain_max_epochs': 30,\n                   'pretrain_min_epochs': 5,\n                   'pretrain_patience': 3,\n                   'pretrain_peptides_per_step': 8,\n                   'pretrain_steps_per_epoch': 256},\n    'validation_split': 0.1,\n    'data_dependent_initialization_method': \"lsuv\",\n}\n\n\ndef verify_optimizable():\n    predictor = train_pan_allele_models_command.train_model(\n        work_item_name=\"work-item0\",\n        work_item_num=0,\n        num_work_items=1,\n        architecture_num=0,\n        num_architectures=1,\n        fold_num=0,\n        num_folds=1,\n        replicate_num=0,\n        num_replicates=1,\n        hyperparameters=HYPERPARAMTERS,\n        pretrain_data_filename=PRETRAIN_DATA_PATH,\n        verbose=1,\n        progress_print_interval=5.0,\n        predictor=None,\n        save_to=None,\n        constant_data={\n            'train_data': TRAIN_DF,\n            'folds_df': FOLDS_DF,\n            'allele_encoding': AlleleEncoding(\n                alleles=ALLELE_SEQUENCES.index.values,\n                allele_to_sequence=ALLELE_SEQUENCES.to_dict()),\n        },\n    )\n    (network,) = predictor.neural_networks\n    print(predictor, network)\n    print(network.fit_info)\n    pretrain_val_loss = network.fit_info[0][\"val_loss\"][-1]\n    print(pretrain_val_loss)\n    
numpy.testing.assert_array_less(pretrain_val_loss, 0.1)\n\n\nif __name__ == \"__main__\":\n    verify_optimizable()\n"
  },
  {
    "path": "test/pytest_helpers.py",
    "content": "\"\"\"\nTest helper functions providing assertion utilities.\n\"\"\"\n\n\nimport sys\n\n\n_MHCFLURRY_COMMANDS = {\n    \"mhcflurry-calibrate-percentile-ranks\": \"mhcflurry.calibrate_percentile_ranks_command\",\n    \"mhcflurry-class1-train-allele-specific-models\": \"mhcflurry.train_allele_specific_models_command\",\n    \"mhcflurry-class1-select-allele-specific-models\": \"mhcflurry.select_allele_specific_models_command\",\n    \"mhcflurry-class1-train-pan-allele-models\": \"mhcflurry.train_pan_allele_models_command\",\n    \"mhcflurry-class1-select-pan-allele-models\": \"mhcflurry.select_pan_allele_models_command\",\n    \"mhcflurry-class1-train-processing-models\": \"mhcflurry.train_processing_models_command\",\n    \"mhcflurry-class1-select-processing-models\": \"mhcflurry.select_processing_models_command\",\n}\n\n\ndef mhcflurry_cli(command):\n    \"\"\"\n    Return argv prefix to run a mhcflurry command using the current interpreter.\n\n    This avoids picking up a system-installed mhcflurry that may rely on TensorFlow.\n    \"\"\"\n    module = _MHCFLURRY_COMMANDS.get(command)\n    if module is None:\n        raise ValueError(f\"Unknown mhcflurry command: {command}\")\n    return [sys.executable, \"-c\", f\"from {module} import run; run()\"]\n\n\ndef eq_(a, b, msg=None):\n    \"\"\"Assert that a equals b.\"\"\"\n    if msg:\n        assert a == b, msg\n    else:\n        assert a == b, f\"{a!r} != {b!r}\"\n\n\ndef assert_less(a, b, msg=None):\n    \"\"\"Assert that a < b.\"\"\"\n    if msg:\n        assert a < b, msg\n    else:\n        assert a < b, f\"{a!r} is not less than {b!r}\"\n\n\ndef assert_greater(a, b, msg=None):\n    \"\"\"Assert that a > b.\"\"\"\n    if msg:\n        assert a > b, msg\n    else:\n        assert a > b, f\"{a!r} is not greater than {b!r}\"\n\n\ndef assert_almost_equal(a, b, places=7, msg=None):\n    \"\"\"Assert that a and b are equal up to `places` decimal places.\"\"\"\n    diff = abs(a - b)\n    threshold = 10 ** (-places)\n    if msg:\n        assert diff < threshold, msg\n    else:\n        assert diff < threshold, (\n            f\"{a!r} != {b!r} within {places} places (diff={diff})\"\n        )\n\n\ndef assert_raises(exc_class, func=None, *args, **kwargs):\n    \"\"\"\n    Assert that calling func raises exc_class.\n    Can also be used as a context manager.\n    \"\"\"\n    import pytest\n    if func is None:\n        return pytest.raises(exc_class)\n    with pytest.raises(exc_class):\n        func(*args, **kwargs)\n"
  },
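  {
    "path": "test/sketch_pytest_helpers_usage.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the test suite: how the helpers in\npytest_helpers.py are meant to be used. The CLI flags mirror\ntest_calibrate_percentile_ranks_command.py; the models directory below is a\nplaceholder assumption.\n\"\"\"\n\nimport subprocess\n\nfrom .pytest_helpers import assert_almost_equal, mhcflurry_cli\n\n\ndef example():\n    # mhcflurry_cli returns an argv prefix that runs the command through the\n    # current interpreter rather than a system-installed entry point.\n    args = mhcflurry_cli(\"mhcflurry-calibrate-percentile-ranks\") + [\n        \"--models-dir\", \"/tmp/example-models\",  # placeholder path\n        \"--num-jobs\", \"0\",\n    ]\n    subprocess.check_call(args)\n\n    # The assertion helpers replace the old nose idioms (eq_, assert_less, ...).\n    assert_almost_equal(0.1 + 0.2, 0.3, places=7)\n"
  },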
  {
    "path": "test/test_allele_encoding.py",
    "content": "\nimport time\n\nfrom mhcflurry.allele_encoding import AlleleEncoding\nfrom mhcflurry.amino_acid import BLOSUM62_MATRIX\nfrom numpy.testing import assert_equal\n\n\ndef test_allele_encoding_speed():\n    encoding = AlleleEncoding(\n        [\"A*02:01\", \"A*02:03\", \"A*02:01\"],\n        {\n            \"A*02:01\": \"AC\",\n            \"A*02:03\": \"AE\",\n        }\n    )\n    start = time.time()\n    encoding1 = encoding.fixed_length_vector_encoded_sequences(\"BLOSUM62\")\n    assert_equal(\n        [\n            [BLOSUM62_MATRIX[\"A\"], BLOSUM62_MATRIX[\"C\"]],\n            [BLOSUM62_MATRIX[\"A\"], BLOSUM62_MATRIX[\"E\"]],\n            [BLOSUM62_MATRIX[\"A\"], BLOSUM62_MATRIX[\"C\"]],\n        ], encoding1)\n    print(\"Simple encoding in %0.2f sec.\" % (time.time() - start))\n    print(encoding1)\n\n    encoding = AlleleEncoding(\n        [\"A*02:01\", \"A*02:03\", \"A*02:01\"] * int(1e5),\n        {\n            \"A*02:01\": \"AC\" * 16,\n            \"A*02:03\": \"AE\" * 16,\n        }\n    )\n    start = time.time()\n    encoding1 = encoding.fixed_length_vector_encoded_sequences(\"BLOSUM62\")\n    print(\"Long encoding in %0.2f sec.\" % (time.time() - start))\n"
  },
  {
    "path": "test/test_amino_acid.py",
    "content": "\"\"\"Tests for amino acid encoding.\"\"\"\n\nfrom mhcflurry import amino_acid\nfrom numpy.testing import assert_equal\nimport pandas\nimport warnings\n\nletter_to_index_dict = {\n    'A': 0,\n    'B': 1,\n    'C': 2,\n}\n\n\ndef test_index_and_one_hot_encoding():\n    letter_to_vector_df = pandas.DataFrame(\n        [\n            [1, 0, 0,],\n            [0, 1, 0,],\n            [0, 0, 1,]\n        ], columns=[0, 1, 2]\n    )\n\n    index_encoding = amino_acid.index_encoding(\n        [\"AAAA\", \"ABCA\"], letter_to_index_dict)\n    assert_equal(\n        index_encoding,\n        [\n            [0, 0, 0, 0],\n            [0, 1, 2, 0],\n        ])\n    one_hot = amino_acid.fixed_vectors_encoding(\n        index_encoding,\n        letter_to_vector_df)\n    assert one_hot.shape == (2, 4, 3)\n    assert_equal(\n        one_hot[0],\n        [\n            [1, 0, 0],\n            [1, 0, 0],\n            [1, 0, 0],\n            [1, 0, 0],\n        ])\n    assert_equal(\n        one_hot[1],\n        [\n            [1, 0, 0],\n            [0, 1, 0],\n            [0, 0, 1],\n            [1, 0, 0],\n        ])\n\n\ndef test_index_encoding_no_downcast_futurewarning():\n    with warnings.catch_warnings():\n        warnings.simplefilter(\"error\", FutureWarning)\n        index_encoding = amino_acid.index_encoding(\n            [\"AAAA\", \"ABCA\"], letter_to_index_dict)\n    assert index_encoding.dtype.kind in (\"i\", \"u\")\n"
  },
  {
    "path": "test/test_api_compat_shims.py",
    "content": "import inspect\nimport pytest\n\nfrom mhcflurry.class1_neural_network import Class1NeuralNetwork\nfrom mhcflurry.common import configure_tensorflow\nfrom mhcflurry.custom_loss import MSEWithInequalities, get_loss\nfrom mhcflurry.data_dependent_weights_initialization import get_activations\nfrom mhcflurry.local_parallelism import worker_init\n\n\ndef test_legacy_configure_tensorflow_entry_point():\n    with pytest.warns(FutureWarning, match=\"configure_tensorflow\"):\n        configure_tensorflow(backend=\"tensorflow\", gpu_device_nums=None, num_threads=1)\n\n\ndef test_legacy_worker_init_signature_kept():\n    params = inspect.signature(worker_init).parameters\n    assert \"keras_backend\" in params\n\n\ndef test_worker_init_preserves_empty_gpu_assignment(monkeypatch):\n    calls = []\n\n    def fake_configure_pytorch(**kwargs):\n        calls.append(kwargs)\n\n    monkeypatch.setattr(\n        \"mhcflurry.local_parallelism.configure_pytorch\",\n        fake_configure_pytorch,\n    )\n\n    worker_init(backend=\"auto\", gpu_device_nums=[])\n\n    assert calls == [{\"backend\": \"auto\", \"gpu_device_nums\": []}]\n\n\ndef test_legacy_cache_key_alias():\n    network_json = (\n        '{\"dense_layer_l1_regularization\": 0.1, '\n        '\"dense_layer_l2_regularization\": 0.2, \"layer_sizes\": [8]}'\n    )\n    assert (\n        Class1NeuralNetwork.keras_network_cache_key(network_json)\n        == Class1NeuralNetwork.model_cache_key(network_json)\n    )\n\n\ndef test_legacy_get_keras_loss_accessor():\n    standard = get_loss(\"mse\")\n    assert standard.get_keras_loss() == standard.loss\n\n    custom = MSEWithInequalities()\n    assert callable(custom.get_keras_loss())\n\n\ndef test_legacy_get_activations_symbol_kept():\n    params = inspect.signature(get_activations).parameters\n    assert tuple(params) == (\"model\", \"layer\", \"X_batch\")\n"
  },
  {
    "path": "test/test_calibrate_percentile_ranks_command.py",
    "content": "\"\"\"\nTests for calibrate percentile ranks command\n\"\"\"\n\nimport os\nimport shutil\nimport tempfile\nimport subprocess\nimport pytest\nimport sys\n\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.downloads import get_path\nfrom .pytest_helpers import mhcflurry_cli\n\nos.environ[\"CUDA_VISIBLE_DEVICES\"] = \"\"\nos.environ[\"MHCFLURRY_CLUSTER_WORKER_COMMAND\"] = (\n    f\"{sys.executable} -m mhcflurry.cluster_worker_entry_point\"\n)\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\n\ndef run_and_check(n_jobs=0, delete=True, additional_args=[]):\n    source_models_dir = get_path(\"models_class1_pan\", \"models.combined\")\n    dest_models_dir = tempfile.mkdtemp(prefix=\"mhcflurry-test-models\")\n\n    # Save a new predictor that has no percent rank calibration data.\n    original_predictor = Class1AffinityPredictor.load(source_models_dir)\n    print(\"Loaded predictor\", source_models_dir)\n    new_predictor = Class1AffinityPredictor(\n        class1_pan_allele_models=original_predictor.class1_pan_allele_models,\n        allele_to_sequence=original_predictor.allele_to_sequence,\n    )\n    new_predictor.save(dest_models_dir)\n    print(\"Saved predictor to\", dest_models_dir)\n\n    new_predictor = Class1AffinityPredictor.load(dest_models_dir)\n    assert len(new_predictor.allele_to_percent_rank_transform) == 0\n\n    args = mhcflurry_cli(\"mhcflurry-calibrate-percentile-ranks\") + [\n        \"--models-dir\", dest_models_dir,\n        \"--match-amino-acid-distribution-data\", get_path(\n            \"data_curated\", \"curated_training_data.affinity.csv.bz2\"),\n        \"--motif-summary\",\n        \"--num-peptides-per-length\", \"1000\",\n        \"--allele\", \"HLA-A*02:01\", \"HLA-B*07:02\",\n        \"--verbosity\", \"1\",\n        \"--num-jobs\", str(n_jobs),\n    ] + additional_args\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    new_predictor = Class1AffinityPredictor.load(dest_models_dir)\n    assert len(new_predictor.allele_to_percent_rank_transform) == 2\n\n    if delete:\n        print(\"Deleting: %s\" % dest_models_dir)\n        shutil.rmtree(dest_models_dir)\n    else:\n        print(\"Not deleting: %s\" % dest_models_dir)\n\n\ndef test_run_serial():\n    run_and_check(n_jobs=0)\n\n\ndef test_run_parallel():\n    run_and_check(n_jobs=2)\n\n\ndef test_run_cluster_parallelism(delete=True):\n    run_and_check(n_jobs=0, additional_args=[\n        '--cluster-parallelism',\n        '--cluster-results-workdir', '/tmp/',\n        '--cluster-max-retries', '0',\n    ], delete=delete)\n\n\nif __name__ == \"__main__\":\n    # run_and_check(n_jobs=0, delete=False)\n    # run_and_check(n_jobs=2, delete=False)\n    test_run_cluster_parallelism(delete=False)\n"
  },
  {
    "path": "test/test_changing_allele_representations.py",
    "content": "\nimport pandas\nimport pytest\n\nfrom mhcflurry.class1_affinity_predictor import Class1AffinityPredictor\nfrom mhcflurry.downloads import get_path\n\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\nALLELE_TO_SEQUENCE = pandas.read_csv(\n    get_path(\n        \"allele_sequences\", \"allele_sequences.csv\"),\n    index_col=0).sequence.to_dict()\n\nHYPERPARAMETERS = {\n    'activation': 'tanh',\n    'allele_dense_layer_sizes': [],\n    'batch_normalization': False,\n    'dense_layer_l1_regularization': 0.0,\n    'dense_layer_l2_regularization': 0.0,\n    'dropout_probability': 0.5,\n    'early_stopping': True,\n    'init': 'glorot_uniform',\n    'layer_sizes': [4],\n    'learning_rate': None,\n    'locally_connected_layers': [],\n    'loss': 'custom:mse_with_inequalities',\n    'max_epochs': 40,\n    'minibatch_size': 128,\n    'optimizer': 'rmsprop',\n    'output_activation': 'sigmoid',\n    'patience': 2,\n    'peptide_allele_merge_activation': '',\n    'peptide_allele_merge_method': 'concatenate',\n    'peptide_amino_acid_encoding': 'BLOSUM62',\n    'peptide_dense_layer_sizes': [],\n    'peptide_encoding': {\n        'alignment_method': 'left_pad_centered_right_pad',\n        'max_length': 15,\n        'vector_encoding_name': 'BLOSUM62',\n    },\n    'random_negative_affinity_max': 50000.0,\n    'random_negative_affinity_min': 20000.0,\n    'random_negative_constant': 0,\n    'random_negative_distribution_smoothing': 0.0,\n    'random_negative_match_distribution': True,\n    'random_negative_rate': 0.0,\n    'train_data': {},\n    'validation_split': 0.1,\n}\n\n\ndef test_changing_allele_representations():\n    allele1 = \"HLA-A*02:01\"\n    allele2 = \"HLA-C*03:04\"\n    allele3 = \"HLA-B*07:02\"\n\n    peptide = \"SIINFEKL\"\n\n    allele_to_sequence = {}\n    for allele in [allele1, allele2]:\n        allele_to_sequence[allele] = ALLELE_TO_SEQUENCE[allele]\n\n    data1 = []\n    for i in range(5000):\n        data1.append((allele1, peptide, 0, \"=\"))\n        data1.append((allele2, peptide, 50000, \"=\"))\n    data1 = pandas.DataFrame(\n        data1, columns=[\"allele\", \"peptide\", \"affinity\", \"inequality\"])\n\n    predictor = Class1AffinityPredictor(allele_to_sequence=allele_to_sequence)\n    predictor.fit_class1_pan_allele_models(\n        n_models=1,\n        architecture_hyperparameters=HYPERPARAMETERS,\n        alleles=data1.allele.values,\n        peptides=data1.peptide.values,\n        affinities=data1.affinity.values,\n        inequalities=data1.inequality.values)\n\n    (value1, value2) = predictor.predict([peptide, peptide], alleles=[allele1, allele2])\n    assert value1 < 100, value1\n    assert value2 > 4000, value2\n\n    allele_to_sequence[allele3] = ALLELE_TO_SEQUENCE[allele3]\n    predictor.allele_to_sequence = allele_to_sequence\n    predictor.clear_cache()\n\n    (value1, value2, value3) = predictor.predict(\n        [peptide, peptide, peptide],\n        alleles=[allele1, allele2, allele3])\n    assert value1 < 100, value1\n    assert value2 > 4000, value2\n\n\n\n\n\n\n\n"
  },
  {
    "path": "test/test_class1_affinity_predictor.py",
    "content": "\"\"\"Tests for Class1AffinityPredictor.\"\"\"\nimport pytest\n\nimport tempfile\nimport shutil\nimport logging\nimport warnings\nimport traceback\nimport sys\n\nimport numpy\nimport pandas\n\nfrom mhcflurry import Class1AffinityPredictor\n\nfrom numpy import testing\n\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.testing_utils import cleanup, startup\n\nDOWNLOADED_PREDICTOR = None\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    \"\"\"Setup and teardown for each test.\"\"\"\n    global DOWNLOADED_PREDICTOR\n    startup()\n    try:\n        DOWNLOADED_PREDICTOR = Class1AffinityPredictor.load()\n    except Exception:\n        DOWNLOADED_PREDICTOR = None\n    logging.basicConfig(level=logging.DEBUG)\n    yield\n    DOWNLOADED_PREDICTOR = None\n    cleanup()\n\n\n# To hunt down a weird warning we were seeing in pandas.\ndef warn_with_traceback(message, category, filename, lineno, file=None, line=None):\n    log = file if hasattr(file, \"write\") else sys.stderr\n    traceback.print_stack(file=log)\n    log.write(warnings.formatwarning(message, category, filename, lineno, line))\n\n\nwarnings.showwarning = warn_with_traceback\n\n\ndef predict_and_check(\n    allele, peptide, predictor=DOWNLOADED_PREDICTOR, expected_range=(0, 500)\n):\n    def debug():\n        print(\n            \"\\n%s\"\n            % (\n                predictor.predict_to_dataframe(\n                    peptides=[peptide],\n                    allele=allele,\n                    include_individual_model_predictions=True,\n                )\n            )\n        )\n\n        (prediction,) = predictor.predict(allele=allele, peptides=[peptide])\n        assert prediction >= expected_range[0], (predictor, prediction, debug())\n        assert prediction <= expected_range[1], (predictor, prediction, debug())\n\n\ndef test_a1_known_epitopes_in_newly_trained_model():\n    allele = \"HLA-A*01:01\"\n    df = pandas.read_csv(\n        get_path(\"data_curated\", \"curated_training_data.affinity.csv.bz2\")\n    )\n    df = df.loc[\n        (df.allele == allele)\n        & (df.peptide.str.len() >= 8)\n        & (df.peptide.str.len() <= 15)\n    ]\n\n    hyperparameters = {\n        \"max_epochs\": 100,\n        \"patience\": 10,\n        \"early_stopping\": True,\n        \"validation_split\": 0.2,\n        \"random_negative_rate\": 0.0,\n        \"random_negative_constant\": 25,\n        \"peptide_amino_acid_encoding\": \"BLOSUM62\",\n        \"use_embedding\": False,\n        \"kmer_size\": 15,\n        \"batch_normalization\": False,\n        \"locally_connected_layers\": [\n            {\"filters\": 8, \"activation\": \"tanh\", \"kernel_size\": 3}\n        ],\n        \"activation\": \"relu\",\n        \"output_activation\": \"sigmoid\",\n        \"layer_sizes\": [32],\n        \"random_negative_affinity_min\": 20000.0,\n        \"random_negative_affinity_max\": 50000.0,\n        \"dense_layer_l1_regularization\": 0.001,\n        \"dropout_probability\": 0.0,\n    }\n\n    predictor = Class1AffinityPredictor()\n    predictor.fit_allele_specific_predictors(\n        n_models=2,\n        architecture_hyperparameters_list=[hyperparameters],\n        allele=allele,\n        peptides=df.peptide.values,\n        affinities=df.measurement_value.values,\n        verbose=0,\n    )\n\n    predict_and_check(\"HLA-A*01:01\", \"EVDPIGHLY\", predictor=predictor)\n\n    models_dir = tempfile.mkdtemp(\"_models\")\n    print(models_dir)\n    predictor.save(models_dir)\n    predictor2 = 
Class1AffinityPredictor.load(models_dir)\n    predict_and_check(\"HLA-A*01:01\", \"EVDPIGHLY\", predictor=predictor2)\n    shutil.rmtree(models_dir)\n\n    predictor3 = Class1AffinityPredictor(\n        allele_to_allele_specific_models={\n            allele: [predictor.allele_to_allele_specific_models[allele][0]]\n        }\n    )\n    predict_and_check(\"HLA-A*01:01\", \"EVDPIGHLY\", predictor=predictor3)\n    models_dir = tempfile.mkdtemp(\"_models\")\n    print(models_dir)\n    predictor3.save(models_dir)\n    predictor4 = Class1AffinityPredictor.load(models_dir)\n    predict_and_check(\"HLA-A*01:01\", \"EVDPIGHLY\", predictor=predictor4)\n    shutil.rmtree(models_dir)\n\n\ndef test_class1_affinity_predictor_a0205_memorize_training_data():\n    # Memorize the dataset.\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[64],\n        max_epochs=100,\n        early_stopping=False,\n        validation_split=0.0,\n        locally_connected_layers=[],\n        dense_layer_l1_regularization=0.0,\n        dropout_probability=0.0,\n    )\n\n    allele = \"HLA-A*02:05\"\n\n    df = pandas.read_csv(\n        get_path(\"data_curated\", \"curated_training_data.affinity.csv.bz2\")\n    )\n    df = df.loc[df.allele == allele]\n    df = df.loc[df.peptide.str.len() == 9]\n    df = df.loc[df.measurement_type == \"quantitative\"]\n    df = df.loc[df.measurement_source == \"kim2014\"]\n\n    predictor = Class1AffinityPredictor()\n    predictor.fit_allele_specific_predictors(\n        n_models=2,\n        architecture_hyperparameters_list=[hyperparameters],\n        allele=allele,\n        peptides=df.peptide.values,\n        affinities=df.measurement_value.values,\n        verbose=0,\n    )\n    predictor.calibrate_percentile_ranks(num_peptides_per_length=1000)\n    ic50_pred = predictor.predict(df.peptide.values, allele=allele)\n    ic50_true = df.measurement_value.values\n    assert len(ic50_pred) == len(ic50_true)\n    testing.assert_allclose(\n        numpy.log(ic50_pred), numpy.log(ic50_true), rtol=0.2, atol=0.2\n    )\n\n    ic50_pred_df = predictor.predict_to_dataframe(df.peptide.values, allele=allele)\n    print(ic50_pred_df)\n    assert \"prediction_percentile\" in ic50_pred_df.columns\n    assert ic50_pred_df.prediction_percentile.isnull().sum() == 0\n\n    ic50_pred_df2 = predictor.predict_to_dataframe(\n        df.peptide.values, allele=allele, include_individual_model_predictions=True\n    )\n    print(ic50_pred_df2)\n\n    # Test an unknown allele\n    print(\"Starting unknown allele check\")\n    assert predictor.supported_alleles == [allele]\n    ic50_pred = predictor.predict(df.peptide.values, allele=\"HLA-A*02:01\", throw=False)\n    assert numpy.isnan(ic50_pred).all()\n\n    testing.assert_raises(\n        ValueError, predictor.predict, df.peptide.values, allele=\"HLA-A*02:01\"\n    )\n\n    assert predictor.supported_alleles == [allele]\n    testing.assert_raises(ValueError, predictor.predict, [\"AAAAA\"], allele=allele)  # too short\n    testing.assert_raises(\n        ValueError,\n        predictor.predict,\n        [\"AAAAAAAAAAAAAAAAAAAA\"],  # too long\n        allele=allele,\n    )\n    ic50_pred = predictor.predict(\n        [\"AAAAA\", \"AAAAAAAAA\", \"AAAAAAAAAAAAAAAAAAAA\"], allele=allele, throw=False\n    )\n    assert numpy.isnan(ic50_pred[0])\n    assert not numpy.isnan(ic50_pred[1])\n    assert numpy.isnan(ic50_pred[2])\n\n\ndef test_no_nans():\n    df = DOWNLOADED_PREDICTOR.predict_to_dataframe(\n        alleles=[\"A02:01\", \"A02:02\"], 
peptides=[\"SIINFEKL\", \"SIINFEKLL\"]\n    )\n    print(df)\n    assert not df.isnull().any().any()\n\n\ndef test_predict_implementations_equivalent():\n    for allele in [\"HLA-A02:01\", \"A02:02\"]:\n        for centrality_measure in [\"mean\", \"robust_mean\"]:\n            peptides = [\"SIINFEKL\", \"SYYNFIIIKL\", \"SIINKFELQY\"]\n\n            pred1 = DOWNLOADED_PREDICTOR.predict(\n                allele=allele,\n                peptides=peptides + [\"SSSN\"],\n                throw=False,\n                centrality_measure=centrality_measure,\n            )\n            pred2 = DOWNLOADED_PREDICTOR.predict_to_dataframe(\n                allele=allele,\n                peptides=peptides + [\"SSSN\"],\n                throw=False,\n                centrality_measure=centrality_measure,\n            ).prediction.values\n            testing.assert_almost_equal(pred1, pred2, decimal=2)\n\n            pred1 = DOWNLOADED_PREDICTOR.predict(\n                allele=allele, peptides=peptides, centrality_measure=centrality_measure\n            )\n            pred2 = DOWNLOADED_PREDICTOR.predict_to_dataframe(\n                allele=allele, peptides=peptides, centrality_measure=centrality_measure\n            ).prediction.values\n            testing.assert_almost_equal(pred1, pred2, decimal=2)\n\n\ndef test_no_runtime_warnings_for_unsupported_rows():\n    with warnings.catch_warnings():\n        warnings.simplefilter(\"error\", RuntimeWarning)\n        df = DOWNLOADED_PREDICTOR.predict_to_dataframe(\n            allele=\"HLA-A*02:01\",\n            peptides=[\"SIINFEKL\", \"SSSN\"],\n            throw=False,\n            include_confidence_intervals=True,\n            centrality_measure=\"mean\",\n        )\n        df2 = DOWNLOADED_PREDICTOR.predict_to_dataframe(\n            allele=\"HLA-A*02:01\",\n            peptides=[\"SSSN\"],\n            throw=False,\n            include_confidence_intervals=True,\n            centrality_measure=\"robust_mean\",\n        )\n    assert not numpy.isnan(df.loc[df.peptide == \"SIINFEKL\", \"prediction\"].iloc[0])\n    assert numpy.isnan(df.loc[df.peptide == \"SSSN\", \"prediction\"].iloc[0])\n    assert numpy.isnan(df2[\"prediction\"].iloc[0])\n"
  },
  {
    "path": "test/test_class1_neural_network.py",
    "content": "\"\"\"\nTests for Class1NeuralNetwork.\n\"\"\"\nimport pytest\n\nimport numpy\nfrom numpy import testing\n\n\nimport pandas\n\nfrom mhcflurry.class1_neural_network import Class1NeuralNetwork\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.common import random_peptides\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    \"\"\"Setup and teardown for each test.\"\"\"\n    startup()\n    yield\n    cleanup()\n\n\n@pytest.mark.slow\ndef test_class1_neural_network_a0205_training_accuracy():\n    \"\"\"Test that the network can memorize a small dataset.\"\"\"\n    # Memorize the dataset.\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=500,\n        early_stopping=False,\n        validation_split=0.0,\n        locally_connected_layers=[\n            {\"filters\": 8, \"activation\": \"tanh\", \"kernel_size\": 3}\n        ],\n        dense_layer_l1_regularization=0.0,\n        dropout_probability=0.0,\n    )\n\n    # First test a Class1NeuralNetwork, then a Class1AffinityPredictor.\n    allele = \"HLA-A*02:05\"\n\n    df = pandas.read_csv(\n        get_path(\"data_curated\", \"curated_training_data.affinity.csv.bz2\")\n    )\n    df = df.loc[df.allele == allele]\n    df = df.loc[df.peptide.str.len() == 9]\n    df = df.loc[df.measurement_type == \"quantitative\"]\n    df = df.loc[df.measurement_source == \"kim2014\"]\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(df.peptide.values, df.measurement_value.values)\n    ic50_pred = predictor.predict(df.peptide.values)\n    ic50_true = df.measurement_value.values\n    assert len(ic50_pred) == len(ic50_true)\n    testing.assert_allclose(\n        numpy.log(ic50_pred), numpy.log(ic50_true), rtol=0.2, atol=0.2\n    )\n\n    # Test that a second predictor has the same architecture json.\n    # This is important for an optimization we use to re-use predictors of the\n    # same architecture at prediction time.\n    hyperparameters2 = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=1,\n        early_stopping=False,\n        validation_split=0.0,\n        locally_connected_layers=[\n            {\"filters\": 8, \"activation\": \"tanh\", \"kernel_size\": 3}\n        ],\n        dense_layer_l1_regularization=0.0,\n        dropout_probability=0.0,\n    )\n    predictor2 = Class1NeuralNetwork(**hyperparameters2)\n    predictor2.fit(df.peptide.values, df.measurement_value.values, verbose=0)\n    assert predictor.network().to_json() == predictor2.network().to_json()\n\n\ndef test_inequalities():\n    \"\"\"Test that inequality constraints are properly handled.\"\"\"\n    # Memorize the dataset.\n    hyperparameters = dict(\n        peptide_amino_acid_encoding=\"one-hot\",\n        activation=\"tanh\",\n        layer_sizes=[4],\n        max_epochs=200,\n        minibatch_size=32,\n        random_negative_rate=0.0,\n        random_negative_constant=0,\n        early_stopping=False,\n        validation_split=0.0,\n        locally_connected_layers=[],\n        dense_layer_l1_regularization=0.0,\n        dropout_probability=0.0,\n        loss=\"custom:mse_with_inequalities_and_multiple_outputs\",\n    )\n\n    dfs = []\n\n    # Weak binders\n    df = pandas.DataFrame()\n    df[\"peptide\"] = random_peptides(500, length=9)\n    df[\"value\"] = 400.0\n    df[\"inequality1\"] = \"=\"\n    df[\"inequality2\"] = \"<\"\n    dfs.append(df)\n\n    # Strong binders - 
same peptides as above, but with stronger (lower) measurement values\n    df = pandas.DataFrame()\n    df[\"peptide\"] = dfs[-1].peptide.values\n    df[\"value\"] = 1.0\n    df[\"inequality1\"] = \"=\"\n    df[\"inequality2\"] = \"=\"\n    dfs.append(df)\n\n    # Non-binders\n    df = pandas.DataFrame()\n    df[\"peptide\"] = random_peptides(500, length=10)\n    df[\"value\"] = 1000\n    df[\"inequality1\"] = \">\"\n    df[\"inequality2\"] = \">\"\n    dfs.append(df)\n\n    df = pandas.concat(dfs, ignore_index=True)\n\n    fit_kwargs = {\"verbose\": 0}\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(\n        df.peptide.values,\n        df.value.values,\n        inequalities=df.inequality1.values,\n        **fit_kwargs\n    )\n    df[\"prediction1\"] = predictor.predict(df.peptide.values)\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(\n        df.peptide.values,\n        df.value.values,\n        inequalities=df.inequality2.values,\n        **fit_kwargs\n    )\n    df[\"prediction2\"] = predictor.predict(df.peptide.values)\n\n    # Binders should be stronger\n    for pred in [\"prediction1\", \"prediction2\"]:\n        assert df.loc[df.value < 1000, pred].mean() < 500\n        assert df.loc[df.value >= 1000, pred].mean() > 500\n\n    # For the binders, the (=) on the weak-binding measurement (400) in\n    # inequality1 should make the prediction weaker, whereas for inequality2\n    # this measurement is a \"<\" so it should allow the strong-binder measurement\n    # to dominate.\n    numpy.testing.assert_array_less(5.0, df.loc[df.value == 1].prediction1.values)\n    numpy.testing.assert_array_less(df.loc[df.value == 1].prediction2.values, 2.0)\n    numpy.testing.assert_allclose(df.loc[df.value == 1].prediction2.values, 1.0, atol=0.5)\n    print(df.groupby(\"value\")[[\"prediction1\", \"prediction2\"]].mean())\n\n\ndef test_basic_training():\n    \"\"\"Test basic network training with synthetic data.\"\"\"\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=50,\n        early_stopping=False,\n        validation_split=0.0,\n        locally_connected_layers=[\n            {\"filters\": 8, \"activation\": \"tanh\", \"kernel_size\": 3}\n        ],\n        dense_layer_l1_regularization=0.0,\n        dropout_probability=0.0,\n    )\n\n    # Generate synthetic data\n    peptides = random_peptides(100, length=9)\n    affinities = numpy.random.uniform(10, 50000, 100)\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(peptides, affinities, verbose=0)\n\n    predictions = predictor.predict(peptides)\n    assert len(predictions) == len(peptides)\n    assert predictions.min() > 0\n    assert predictions.max() < 100000\n\n\ndef test_serialization():\n    \"\"\"Test that network weights can be serialized and deserialized.\"\"\"\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=10,\n        early_stopping=False,\n        validation_split=0.0,\n        locally_connected_layers=[\n            {\"filters\": 8, \"activation\": \"tanh\", \"kernel_size\": 3}\n        ],\n    )\n\n    peptides = random_peptides(50, length=9)\n    affinities = numpy.random.uniform(10, 50000, 50)\n\n    # Train a network\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(peptides, affinities, verbose=0)\n\n    # Get predictions before serialization\n    preds_before = predictor.predict(peptides)\n\n    # Serialize and deserialize\n    config = predictor.get_config()\n    weights = predictor.get_weights()\n\n    predictor2 = Class1NeuralNetwork.from_config(config, weights=weights)\n    preds_after = predictor2.predict(peptides)\n\n    # Predictions should be identical\n    numpy.testing.assert_allclose(preds_before, preds_after, rtol=1e-5)\n\n\ndef test_different_peptide_lengths():\n    \"\"\"Test that the network handles different peptide lengths correctly.\"\"\"\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=20,\n        validation_split=0.0,\n    )\n\n    # Mix of different length peptides\n    peptides = (\n        random_peptides(30, length=8) +\n        random_peptides(30, length=9) +\n        random_peptides(30, length=10) +\n        random_peptides(10, length=11)\n    )\n    affinities = numpy.random.uniform(10, 50000, 100)\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(peptides, affinities, verbose=0)\n\n    predictions = predictor.predict(peptides)\n    assert len(predictions) == len(peptides)\n\n\ndef test_early_stopping():\n    \"\"\"Test that early stopping works correctly.\"\"\"\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=1000,\n        early_stopping=True,\n        patience=5,\n        validation_split=0.2,\n    )\n\n    peptides = random_peptides(200, length=9)\n    affinities = numpy.random.uniform(10, 50000, 200)\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(peptides, affinities, verbose=0)\n\n    # Should stop well before 1000 epochs\n    # (We can't easily check this without modifying the class to expose the final epoch)\n    predictions = predictor.predict(peptides)\n    assert len(predictions) == len(peptides)\n\n\ndef test_batch_normalization():\n    \"\"\"Test training with batch normalization.\"\"\"\n    hyperparameters = dict(\n        activation=\"relu\",\n        layer_sizes=[16],\n        max_epochs=20,\n        validation_split=0.0,\n        batch_normalization=True,\n    )\n\n    peptides = random_peptides(100, length=9)\n    affinities = numpy.random.uniform(10, 50000, 100)\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(peptides, affinities, verbose=0)\n\n    predictions = predictor.predict(peptides)\n    assert len(predictions) == len(peptides)\n\n\ndef test_dropout():\n    \"\"\"Test training with dropout.\"\"\"\n    hyperparameters = dict(\n        activation=\"relu\",\n        layer_sizes=[32, 16],\n        max_epochs=20,\n        validation_split=0.0,\n        dropout_probability=0.5,\n    )\n\n    peptides = random_peptides(100, length=9)\n    affinities = numpy.random.uniform(10, 50000, 100)\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(peptides, affinities, verbose=0)\n\n    predictions = predictor.predict(peptides)\n    assert len(predictions) == len(peptides)\n\n\ndef test_multiple_outputs():\n    \"\"\"Test network with multiple outputs.\"\"\"\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=50,\n        validation_split=0.0,\n        num_outputs=2,\n        loss=\"custom:mse_with_inequalities_and_multiple_outputs\",\n        locally_connected_layers=[],\n    )\n\n    peptides = random_peptides(100, length=9)\n    affinities = numpy.random.uniform(0.0, 1.0, 100)\n    output_indices = numpy.random.choice([0, 1], 100)\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    predictor.fit(\n        peptides, affinities, output_indices=output_indices, verbose=0\n    )\n\n    # Predict for each output\n    predictions0 = predictor.predict(peptides, output_index=0)\n    predictions1 = predictor.predict(peptides, output_index=1)\n\n    assert len(predictions0) == len(peptides)\n    assert len(predictions1) == len(peptides)\n"
  },
  {
    "path": "test/test_class1_pan.py",
    "content": "\"\"\"\nTests for training and predicting using Class1 pan-allele models.\n\"\"\"\n\nfrom sklearn.metrics import roc_auc_score\nimport pandas\nimport pytest\n\nfrom numpy.testing import assert_\n\nfrom mhcflurry import Class1NeuralNetwork\nfrom mhcflurry.allele_encoding import AlleleEncoding\nfrom mhcflurry.downloads import get_path\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\n\nHYPERPARAMETERS = {\n    'activation': 'tanh',\n    'allele_dense_layer_sizes': [],\n    'batch_normalization': False,\n    'dense_layer_l1_regularization': 0.0,\n    'dense_layer_l2_regularization': 0.0,\n    'dropout_probability': 0.5,\n    'early_stopping': True,\n    'init': 'glorot_uniform',\n    'layer_sizes': [64],\n    'learning_rate': None,\n    'locally_connected_layers': [],\n    'loss': 'custom:mse_with_inequalities',\n    'max_epochs': 5000,\n    'minibatch_size': 256,\n    'optimizer': 'rmsprop',\n    'output_activation': 'sigmoid',\n    'patience': 5,\n    'peptide_allele_merge_activation': '',\n    'peptide_allele_merge_method': 'concatenate',\n    'peptide_amino_acid_encoding': 'BLOSUM62',\n    'peptide_dense_layer_sizes': [],\n    'peptide_encoding': {\n        'alignment_method': 'left_pad_centered_right_pad',\n        'max_length': 15,\n        'vector_encoding_name': 'BLOSUM62',\n    },\n    'random_negative_affinity_max': 50000.0,\n    'random_negative_affinity_min': 20000.0,\n    'random_negative_constant': 25,\n    'random_negative_distribution_smoothing': 0.0,\n    'random_negative_match_distribution': True,\n    'random_negative_rate': 0.2,\n    'random_negative_method': 'by_allele',\n    'train_data': {},\n    'validation_split': 0.1,\n}\n\n\nALLELE_TO_SEQUENCE = pandas.read_csv(\n    get_path(\n        \"allele_sequences\", \"allele_sequences.csv\"),\n    index_col=0).sequence.to_dict()\n\n\nTRAIN_DF = pandas.read_csv(\n    get_path(\n        \"data_curated\", \"curated_training_data.affinity.csv.bz2\"))\n\nTRAIN_DF = TRAIN_DF.loc[TRAIN_DF.allele.isin(ALLELE_TO_SEQUENCE)]\nTRAIN_DF = TRAIN_DF.loc[TRAIN_DF.peptide.str.len() >= 8]\nTRAIN_DF = TRAIN_DF.loc[TRAIN_DF.peptide.str.len() <= 15]\n\nTRAIN_DF = TRAIN_DF.loc[\n    TRAIN_DF.allele.isin(TRAIN_DF.allele.value_counts().iloc[:3].index)\n]\n\n\nMS_HITS_DF = pandas.read_csv(\n    get_path(\n        \"data_curated\", \"curated_training_data.csv.bz2\"))\nMS_HITS_DF = MS_HITS_DF.loc[MS_HITS_DF.allele.isin(TRAIN_DF.allele.unique())]\nMS_HITS_DF = MS_HITS_DF.loc[MS_HITS_DF.peptide.str.len() >= 8]\nMS_HITS_DF = MS_HITS_DF.loc[MS_HITS_DF.peptide.str.len() <= 15]\nMS_HITS_DF = MS_HITS_DF.loc[~MS_HITS_DF.peptide.isin(TRAIN_DF.peptide)]\n\nprint(\"Loaded %d training and %d ms hits\" % (\n    len(TRAIN_DF), len(MS_HITS_DF)))\n\n\ndef test_train_simple():\n    # Reset random seeds to ensure reproducibility regardless of test order\n    import numpy\n    import random\n    import torch\n    numpy.random.seed(1)\n    random.seed(1)\n    torch.manual_seed(1)\n\n    network = Class1NeuralNetwork(**HYPERPARAMETERS)\n    allele_encoding = AlleleEncoding(\n        TRAIN_DF.allele.values,\n        allele_to_sequence=ALLELE_TO_SEQUENCE)\n    network.fit(\n        TRAIN_DF.peptide.values,\n        affinities=TRAIN_DF.measurement_value.values,\n        allele_encoding=allele_encoding,\n        inequalities=TRAIN_DF.measurement_inequality.values)\n\n    validation_df = MS_HITS_DF.copy()\n    validation_df[\"hit\"] = 1\n\n   
 decoys_df = MS_HITS_DF.copy()\n    decoys_df[\"hit\"] = 0\n    decoys_df[\"allele\"] = decoys_df.allele.sample(frac=1.0).values\n\n    validation_df = pandas.concat([validation_df, decoys_df], ignore_index=True)\n\n    predictions = network.predict(\n        peptides=validation_df.peptide.values,\n        allele_encoding=AlleleEncoding(\n            validation_df.allele.values, borrow_from=allele_encoding))\n\n    print(pandas.Series(predictions).describe())\n\n    score = roc_auc_score(validation_df.hit, -1 * predictions)\n    print(\"AUC\", score)\n\n    assert_(score > 0.6)\n\n"
  },
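  {
    "path": "test/sketch_auc_direction.py",
    "content": "\"\"\"\nIllustrative sketch, not part of the test suite: why test_class1_pan.py\npasses -1 * predictions to roc_auc_score. Predicted affinities are IC50-like\nvalues in nM, where smaller means stronger binding, so they must be negated\nbefore scoring against hit labels. The toy numbers here are made up.\n\"\"\"\n\nfrom sklearn.metrics import roc_auc_score\n\n# 1 = observed mass-spec hit, 0 = shuffled-allele decoy.\nHITS = [1, 1, 0, 0]\n\n# Strong (low nM) affinities for the hits, weak for the decoys.\nAFFINITIES = [25.0, 80.0, 9000.0, 30000.0]\n\nif __name__ == \"__main__\":\n    # Raw affinities rank the decoys highest: AUC 0.0 on this toy data.\n    print(roc_auc_score(HITS, AFFINITIES))\n    # Negating restores the intended orientation: AUC 1.0.\n    print(roc_auc_score(HITS, [-x for x in AFFINITIES]))\n"
  },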
  {
    "path": "test/test_class1_presentation_predictor.py",
    "content": "\nimport pandas\nimport tempfile\nimport pickle\n\nimport numpy\nimport pytest\n\nfrom sklearn.metrics import roc_auc_score\n\nfrom mhcflurry import Class1AffinityPredictor, Class1ProcessingPredictor\nfrom mhcflurry.class1_presentation_predictor import Class1PresentationPredictor\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.testing_utils import cleanup, startup\nimport mhcflurry.class1_presentation_predictor\n\nfrom . import data_path\n\nmhcflurry.class1_presentation_predictor.PREDICT_CHUNK_SIZE = 15\n\n\n\ndef setup_module():\n    global AFFINITY_PREDICTOR\n    global CLEAVAGE_PREDICTOR\n    global CLEAVAGE_PREDICTOR_NO_FLANKING\n    global PRESENTATION_PREDICTOR\n    startup()\n    AFFINITY_PREDICTOR = Class1AffinityPredictor.load(\n        get_path(\"models_class1_pan\", \"models.combined\"),\n        optimization_level=0,\n        max_models=1)\n    CLEAVAGE_PREDICTOR = Class1ProcessingPredictor.load(\n        get_path(\"models_class1_processing\", \"models.selected.with_flanks\"),\n        max_models=1)\n    CLEAVAGE_PREDICTOR_NO_FLANKING = Class1ProcessingPredictor.load(\n        get_path(\"models_class1_processing\", \"models.selected.no_flank\"),\n        max_models=1)\n    PRESENTATION_PREDICTOR = Class1PresentationPredictor.load()\n\n\ndef teardown_module():\n    global AFFINITY_PREDICTOR\n    global CLEAVAGE_PREDICTOR\n    global CLEAVAGE_PREDICTOR_NO_FLANKING\n    global PRESENTATION_PREDICTOR\n    AFFINITY_PREDICTOR = None\n    CLEAVAGE_PREDICTOR = None\n    CLEAVAGE_PREDICTOR_NO_FLANKING = None\n    PRESENTATION_PREDICTOR = None\n    cleanup()\n\n\n@pytest.fixture(scope=\"module\")\ndef predictors():\n    return {\n        \"affinity_predictor\": AFFINITY_PREDICTOR,\n        \"cleavage_predictor\": CLEAVAGE_PREDICTOR,\n        \"cleavage_predictor_no_flanking\": CLEAVAGE_PREDICTOR_NO_FLANKING,\n        \"presentation_predictor\": PRESENTATION_PREDICTOR,\n    }\n\n\ndef test_basic(predictors):\n    df = pandas.read_csv(data_path(\"multiallelic.benchmark.small.csv.bz2\"))\n    train_df = df.loc[\n        df.sample_id.isin(sorted(df.sample_id.unique())[:3])\n    ]\n    test_df = df.loc[\n        ~df.sample_id.isin(train_df.sample_id.unique())\n    ]\n    test_df = test_df.sample(frac=0.01, weights=test_df.hit + 0.01)\n\n    experiment_to_alleles = (\n        df.drop_duplicates(\"sample_id\").set_index(\"sample_id\").hla.str.split().to_dict())\n\n    predictor = Class1PresentationPredictor(\n        affinity_predictor=predictors[\"affinity_predictor\"],\n        processing_predictor_without_flanks=predictors['cleavage_predictor_no_flanking'],\n        processing_predictor_with_flanks=predictors['cleavage_predictor'])\n\n    predictor.fit(\n        targets=train_df.hit.values,\n        peptides=train_df.peptide.values,\n        sample_names=train_df.sample_id.values,\n        alleles=experiment_to_alleles,\n        n_flanks=train_df.n_flank.values,\n        c_flanks=train_df.c_flank.values,\n        verbose=2)\n\n    # Calibrate with fake data.\n    predictor.calibrate_percentile_ranks(numpy.random.rand(100)**2)\n\n    def add_prediction_cols(test_df, predictor):\n        prediction1_df = predictor.predict(\n            peptides=test_df.peptide.values,\n            sample_names=test_df.sample_id.values,\n            alleles=experiment_to_alleles,\n            n_flanks=test_df.n_flank.values,\n            c_flanks=test_df.c_flank.values,\n            verbose=2)\n        print(prediction1_df)\n\n        prediction2_df = predictor.predict(\n            
peptides=test_df.peptide.values,\n            sample_names=test_df.sample_id.values,\n            alleles=experiment_to_alleles,\n            verbose=2)\n        print(prediction2_df)\n\n        test_df[\"prediction1\"] = prediction1_df.presentation_score.values\n        test_df[\"prediction2\"] = prediction2_df.presentation_score.values\n        test_df[\"prediction1_percentile\"] = prediction1_df.presentation_percentile.values\n        test_df[\"prediction2_percentile\"] = prediction2_df.presentation_percentile.values\n\n        test_df[\"processing_score1\"] = prediction1_df.processing_score.values\n        test_df[\"processing_score2\"] = prediction2_df.processing_score.values\n\n        test_df[\"affinity1\"] = prediction1_df.affinity.values\n        test_df[\"affinity2\"] = prediction2_df.affinity.values\n\n\n    add_prediction_cols(test_df, predictor)\n\n    score1 = roc_auc_score(test_df.hit.values, test_df.prediction1.values)\n    score2 = roc_auc_score(test_df.hit.values, test_df.prediction2.values)\n\n    print(\"AUC\", score1, score2)\n\n    assert score1 > 0.8\n    assert score2 > 0.8\n\n    score1 = roc_auc_score(\n        test_df.hit.values, -test_df.prediction1_percentile.values)\n    score2 = roc_auc_score(\n        test_df.hit.values, -test_df.prediction2_percentile.values)\n    print(\"AUC (using percentiles)\", score1, score2)\n\n    assert score1 > 0.8\n    assert score2 > 0.8\n\n    # Test saving, loading, pickling\n    models_dir = tempfile.mkdtemp(\"_models\")\n    print(models_dir)\n    predictor.save(models_dir)\n    predictor2 = Class1PresentationPredictor.load(models_dir)\n    predictor3 = pickle.loads(\n        pickle.dumps(predictor, protocol=pickle.HIGHEST_PROTOCOL))\n    predictor4 = pickle.loads(\n        pickle.dumps(predictor2, protocol=pickle.HIGHEST_PROTOCOL))\n\n    for (i, other_predictor) in enumerate([predictor2, predictor3, predictor4]):\n        print(\"Testing identity\", i + 1)\n        other_test_df = test_df.copy()\n\n        del other_test_df[\"prediction1\"]\n        del other_test_df[\"prediction2\"]\n        add_prediction_cols(other_test_df, other_predictor)\n\n        numpy.testing.assert_array_almost_equal(\n            test_df[\"prediction1\"], other_test_df[\"prediction1\"], decimal=6)\n        numpy.testing.assert_array_almost_equal(\n            test_df[\"prediction2\"], other_test_df[\"prediction2\"], decimal=6)\n\n\ndef test_downloaded_predictor_small(predictors):\n    presentation_predictor = predictors['presentation_predictor']\n\n    # Test sequence scanning\n    scan_results = presentation_predictor.predict_sequences(\n        sequences=[\n            \"MESLVPGFN\",\n            \"QPYVFIKRS\",\n            \"AGGHSYGAD\",\n        ],\n        alleles={\n            \"HLA-A*02:01\": [\"HLA-A*02:01\"],\n            \"HLA-C*02:01\": [\"HLA-C*02:01\"],\n        },\n        peptide_lengths=[9],\n        result=\"best\")\n    print(scan_results)\n    assert len(scan_results) == 6\n\n    scan_results = presentation_predictor.predict_sequences(\n        sequences=[\n            \"MESLVPGFN\",\n            \"QPYVFIKRS\",\n            \"AGGHSYGAD\",\n        ],\n        alleles={\n            \"HLA-A*02:01\": [\"HLA-A*02:01\"],\n            \"HLA-C*02:01\": [\"HLA-C*02:01\"],\n        },\n        peptide_lengths=[8, 9],\n        result=\"best\")\n    print(scan_results)\n    assert len(scan_results) == 6\n\n    scan_results = presentation_predictor.predict_sequences(\n        sequences=[\n            \"MESLVPGFN\",\n            
\"QPYVFIKRS\",\n            \"AGGHSYGAD\",\n        ],\n        alleles={\n            \"HLA-A*02:01\": [\"HLA-A*02:01\"],\n            \"HLA-C*02:01\": [\"HLA-C*02:01\"],\n        },\n        peptide_lengths=[9],\n        result=\"all\")\n    print(scan_results)\n    assert len(scan_results) == 6\n\n    scan_results = presentation_predictor.predict_sequences(\n        sequences=[\n            \"MESLVPGFN\",\n            \"QPYVFIKRS\",\n            \"AGGHSYGAD\",\n        ],\n        alleles={\n            \"HLA-A*02:01\": [\"HLA-A*02:01\"],\n            \"HLA-C*02:01\": [\"HLA-C*02:01\"],\n        },\n        peptide_lengths=[8, 9],\n        result=\"all\")\n    print(scan_results)\n    assert len(scan_results) == 18\n\n    scan_results = presentation_predictor.predict_sequences(\n        sequences=[\n            \"MESLVPGFN\",\n            \"QPYVFIKRS\",\n            \"AGGHSYGAD\",\n        ],\n        alleles={\n            \"HLA-A*02:01\": [\"HLA-A*02:01\"],\n            \"HLA-C*02:01\": [\"HLA-C*02:01\"],\n        },\n        peptide_lengths=[10],\n        result=\"all\")\n    print(scan_results)\n    assert len(scan_results) == 0\n\n\ndef test_downloaded_predictor(predictors):\n    presentation_predictor = predictors['presentation_predictor']\n\n    # Test sequence scanning\n    scan_results1 = presentation_predictor.predict_sequences(\n        sequences=[\n            \"MESLVPGFNEKTHVQLSLPVLQVRDVLVRGFGDSVEEVLSEARQHLKDGTCGLVEVEKGVLPQLE\",\n            \"QPYVFIKRSDARTAPHGHVMVELVAELEGIQYGRSGETLGVLVPHVGEIPVAYRKVLLRKNGNKG\",\n            \"AGGHSYGADLKSFDLGDELGTDPYEDFQENWNTKHSSGVTRELMRELNGGAYTRYVDNNFCGPDG\",\n        ],\n        alleles=[\n            \"HLA-A*02:01\",\n            \"HLA-A*03:01\",\n            \"HLA-B*57:01\",\n            \"HLA-B*44:02\",\n            \"HLA-C*02:01\",\n            \"HLA-C*07:01\",\n        ])\n    print(scan_results1)\n\n    assert len(scan_results1) == 3, str(scan_results1)\n    assert (scan_results1.affinity < 200).all(), str(scan_results1)\n    assert (scan_results1.presentation_score > 0.7).all(), str(scan_results1)\n\n    scan_results2 = presentation_predictor.predict_sequences(\n        result=\"filtered\",\n        filter_value=500,\n        comparison_quantity=\"affinity\",\n        sequences={\n            \"seq1\": \"MESLVPGFNEKTHVQLSLPVLQVRDVLVRGFGDSVEEVLSEARQHLKDGTCGLVEVEKGVLPQLE\",\n            \"seq2\": \"QPYVFIKRSDARTAPHGHVMVELVAELEGIQYGRSGETLGVLVPHVGEIPVAYRKVLLRKNGNKG\",\n            \"seq3\": \"AGGHSYGADLKSFDLGDELGTDPYEDFQENWNTKHSSGVTRELMRELNGGAYTRYVDNNFCGPDG\",\n        },\n        alleles=[\n            \"HLA-A*02:01\",\n            \"HLA-A*03:01\",\n            \"HLA-B*57:01\",\n            \"HLA-B*44:02\",\n            \"HLA-C*02:01\",\n            \"HLA-C*07:01\",\n        ])\n    print(scan_results2)\n\n    assert len(scan_results2) > 10\n    assert (scan_results2.affinity <= 500).all()\n\n    scan_results3 = presentation_predictor.predict_sequences(\n        result=\"filtered\",\n        filter_value=0.9,\n        comparison_quantity=\"presentation_score\",\n        sequences={\n            \"seq1\": \"MESLVPGFNEKTHVQLSLPVLQVRDVLVRGFGDSVEEVLSEARQHLKDGTCGLVEVEKGVLPQLE\",\n            \"seq2\": \"QPYVFIKRSDARTAPHGHVMVELVAELEGIQYGRSGETLGVLVPHVGEIPVAYRKVLLRKNGNKG\",\n            \"seq3\": \"AGGHSYGADLKSFDLGDELGTDPYEDFQENWNTKHSSGVTRELMRELNGGAYTRYVDNNFCGPDG\",\n        },\n        alleles=[\n            \"HLA-A*02:01\",\n            \"HLA-A*03:01\",\n            \"HLA-B*57:01\",\n            \"HLA-B*44:02\",\n            
\"HLA-C*02:01\",\n            \"HLA-C*07:01\",\n        ])\n    print(scan_results3)\n\n    assert len(scan_results3) >= 5, len(scan_results3)\n    assert (scan_results3.presentation_score >= 0.9).all()\n\n    scan_results4 = presentation_predictor.predict_sequences(\n        result=\"all\",\n        comparison_quantity=\"affinity\",\n        sequences={\n            \"seq1\": \"MESLVPGFNEKTHVQLSLPVLQVRDVLVRGFGDSVEEVLSEARQHLKDGTCGLVEVEKGVLPQLE\",\n            \"seq2\": \"QPYVFIKRSDARTAPHGHVMVELVAELEGIQYGRSGETLGVLVPHVGEIPVAYRKVLLRKNGNKG\",\n            \"seq3\": \"AGGHSYGADLKSFDLGDELGTDPYEDFQENWNTKHSSGVTRELMRELNGGAYTRYVDNNFCGPDG\",\n        },\n        alleles=[\n            \"HLA-A*02:01\",\n            \"HLA-A*03:01\",\n            \"HLA-B*57:01\",\n            \"HLA-B*44:02\",\n            \"HLA-C*02:01\",\n            \"HLA-C*07:01\",\n        ])\n    print(scan_results4)\n\n    assert len(scan_results4) > 200, len(scan_results4)\n    assert scan_results4.iloc[0].affinity < 100\n\n    sequences = {\n        \"seq1\":\n            \"MESLVPGFNEKTHVQLSLPVLQVRDVLVRGFGDSVEEVLSEARQHLKDGTCGLVEVEKGVLPQLE\",\n        \"seq2\":\n            \"QPYVFIKRSDARTAPHGHVMVELVAELEGIQYGRSGETLGVLVPHVGEIPVAYRKVLLRKNGNKG\",\n        \"seq3\":\n            \"AGGHSYGADLKSFDLGDELGTDPYEDFQENWNTKHSSGVTRELMRELNGGAYTRYVDNNFCGPDG\",\n    }\n\n    scan_results5 = presentation_predictor.predict_sequences(\n        result=\"all\",\n        comparison_quantity=\"affinity\",\n        sequences=sequences,\n        alleles={\n            \"sample1\": [\n                \"HLA-A*02:01\",\n                \"HLA-A*03:01\",\n                \"HLA-B*57:01\",\n                \"HLA-B*44:02\",\n                \"HLA-C*02:01\",\n                \"HLA-C*07:01\",\n            ],\n            \"sample2\": [\n                \"HLA-A*01:01\",\n                \"HLA-A*02:06\",\n                \"HLA-B*07:02\",\n                \"HLA-B*44:02\",\n                \"HLA-C*03:01\",\n                \"HLA-C*07:02\",\n            ],\n        })\n    print(scan_results5)\n    assert len(scan_results5) == len(scan_results4) * 2\n\n    # Test case-insensitive.\n    scan_results6 = presentation_predictor.predict_sequences(\n        result=\"all\",\n        comparison_quantity=\"affinity\",\n        sequences=dict((k, v.lower()) for (k, v) in sequences.items()),\n        alleles={\n            \"sample1\": [\n                \"HLA-A*02:01\",\n                \"HLA-A*03:01\",\n                \"HLA-B*57:01\",\n                \"HLA-B*44:02\",\n                \"HLA-C*02:01\",\n                \"HLA-C*07:01\",\n            ],\n            \"sample2\": [\n                \"HLA-A*01:01\",\n                \"HLA-A*02:06\",\n                \"HLA-B*07:02\",\n                \"HLA-B*44:02\",\n                \"HLA-C*03:01\",\n                \"HLA-C*07:02\",\n            ],\n        })\n\n    numpy.testing.assert_array_equal(\n        scan_results6.peptide.to_numpy(),\n        scan_results5.peptide.str.lower().to_numpy(),\n    )\n    numpy.testing.assert_almost_equal(\n        scan_results6.affinity.values, scan_results5.affinity.values)\n    numpy.testing.assert_almost_equal(\n        scan_results6.processing_score.values,\n        scan_results5.processing_score.values)\n    numpy.testing.assert_almost_equal(\n        scan_results6.presentation_score.values,\n        scan_results5.presentation_score.values)\n\n    scan_results7 = presentation_predictor.predict_sequences(\n        result=\"all\",\n        comparison_quantity=\"affinity\",\n        
sequences={\n            \"seq1\": \"LVEVEKgVLPQLE\",\n            \"seq2\": \"MRELNGGAYTRYVDNNFCGPdg\",\n        },\n        alleles={\n            \"sample1\": [\n                \"HLA-A*02:01\",\n                \"HLA-A*03:01\",\n                \"HLA-B*57:01\",\n                \"HLA-B*44:02\",\n                \"HLA-C*02:01\",\n                \"HLA-C*07:01\",\n            ]\n        })\n    print(scan_results7)\n\n    # Check that c-terminus peptide is included and with the same case as input.\n    assert \"DNNFCGPdg\" in scan_results7.peptide.values, scan_results7.peptide\n\n\ndef test_downloaded_predictor_invalid_peptides(predictors):\n    presentation_predictor = predictors['presentation_predictor']\n\n    peptides = [\n        \"SIINFEKL\",\n        \"REALLYLNGPEPTIDESSSSS\",\n        \"SIINFEKLQ\",\n    ]\n    alleles = [\n        \"HLA-A*02:01\",\n        \"HLA-A*03:01\",\n        \"HLA-B*57:01\",\n        \"HLA-B*44:02\",\n        \"HLA-C*02:01\",\n        \"HLA-C*07:01\",\n    ]\n\n    numpy.testing.assert_raises(\n        ValueError,\n        presentation_predictor.predict,\n        peptides=peptides,\n        alleles=alleles)\n\n    results1 = presentation_predictor.predict(\n        peptides=peptides,\n        alleles=alleles,\n        throw=False).presentation_score.values\n    numpy.testing.assert_equal(numpy.isnan(results1), [False, True, False])\n"
  },
  {
    "path": "test/test_class1_processing_neural_network.py",
    "content": "\"\"\"\nTests for Class1ProcessingNeuralNetwork.\n\"\"\"\nimport pytest\n\nimport re\nimport numpy\nfrom sklearn.metrics import roc_auc_score\nimport pandas\n\nfrom mhcflurry.class1_processing_neural_network import Class1ProcessingNeuralNetwork\nfrom mhcflurry.common import random_peptides\nfrom mhcflurry.amino_acid import BLOSUM62_MATRIX\nfrom mhcflurry.flanking_encoding import FlankingEncoding\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\nnumpy.random.seed(0)\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    \"\"\"Setup and teardown for each test.\"\"\"\n    startup()\n    yield\n    cleanup()\n\n\ntable = dict([\n    (tuple(encoding), amino_acid)\n    for amino_acid, encoding in BLOSUM62_MATRIX.iterrows()\n])\n\n\ndef decode_matrix(array):\n    \"\"\"\n    Convert BLOSSUM62-encoded sequences to amino acid strings.\n\n    Parameters\n    ----------\n    array : shape (num, length, dim) where num is number of sequences,\n    length is the length of the sequences, and dim is the BLOSUM62 dimensions\n    (i.e. 21).\n\n    Returns\n    -------\n    list of strings\n    \"\"\"\n    (num, length, dim) = array.shape\n    assert dim == 21\n\n    results = []\n    for row in array:\n        item = \"\".join([\n            table[tuple(x)] for x in row\n        ])\n        results.append(item)\n    return results\n\n\ndef test_neural_network_input():\n    \"\"\"Test that input encoding produces expected sequences.\"\"\"\n    model = Class1ProcessingNeuralNetwork(\n        peptide_max_length=12,\n        n_flank_length=8,\n        c_flank_length=5)\n\n    tests = [\n        {\n            # Input\n            \"peptide\": \"SIINFEKL\",\n            \"n_flank\": \"QWERTYIPSDFG\",\n            \"c_flank\": \"FGHKLCVNMQWE\",\n\n            # Expected results\n            \"sequence\": \"TYIPSDFGSIINFEKLFGHKLXXXX\",\n        },\n        {\n            # Input\n            \"peptide\": \"QCV\",\n            \"n_flank\": \"QWERTYIPSDFG\",\n            \"c_flank\": \"FGHKLCVNMQWE\",\n\n            # Expected results\n            \"sequence\": \"TYIPSDFGQCVFGHKLXXXXXXXXX\",\n        },\n        {\n            # Input\n            \"peptide\": \"QCVSQCVSQCVS\",\n            \"n_flank\": \"QWE\",\n            \"c_flank\": \"MNV\",\n\n            # Expected results\n            \"sequence\": \"XXXXXQWEQCVSQCVSQCVSMNVXX\",\n        },\n        {\n            # Input\n            \"peptide\": \"QCVSQCVSQCVS\",\n            \"n_flank\": \"\",\n            \"c_flank\": \"MNV\",\n\n            # Expected results\n            \"sequence\": \"XXXXXXXXQCVSQCVSQCVSMNVXX\",\n        },\n        {\n            # Input\n            \"peptide\": \"QCVSQCVSQCVS\",\n            \"n_flank\": \"\",\n            \"c_flank\": \"\",\n\n            # Expected results\n            \"sequence\": \"XXXXXXXXQCVSQCVSQCVSXXXXX\",\n        },\n    ]\n\n    for (i, d) in enumerate(tests):\n        encoding = FlankingEncoding(\n            peptides=[d['peptide']],\n            n_flanks=[d['n_flank']],\n            c_flanks=[d['c_flank']])\n\n        results = model.network_input(encoding)\n        (decoded,) = decode_matrix(results['sequence'])\n\n        numpy.testing.assert_equal(decoded, d['sequence'])\n        numpy.testing.assert_equal(results['peptide_length'], len(d['peptide']))\n\n    # Test all at once\n    df = pandas.DataFrame(tests)\n    encoding = FlankingEncoding(df.peptide, df.n_flank, df.c_flank)\n    results = model.network_input(encoding)\n    df[\"decoded\"] = 
decode_matrix(results['sequence'])\n    numpy.testing.assert_array_equal(df.decoded.to_numpy(), df.sequence.to_numpy())\n    numpy.testing.assert_equal(\n        results['peptide_length'], df.peptide.str.len().values)\n\n\ndef test_small():\n    \"\"\"Test basic network training with small dataset.\"\"\"\n    train_basic_network(num=10000)\n\n\n@pytest.mark.slow\ndef test_more():\n    \"\"\"Test network with different hyperparameters.\"\"\"\n    train_basic_network(\n        num=10000,\n        flanking_averages=False,\n        convolutional_kernel_size=3,\n        c_flank_length=0,\n        n_flank_length=3,\n        post_convolutional_dense_layer_sizes=[8])\n\n\n@pytest.mark.slow\ndef test_basic_indexing(num=10000, do_assertions=True, **hyperparameters):\n    \"\"\"Test that basic indexing patterns are learned.\"\"\"\n    def is_hit(n_flank, c_flank, peptide):\n        return peptide[0] in \"SIQVL\" and peptide[-1] in \"YIPASD\"\n\n    def is_hit1(n_flank, c_flank, peptide):\n        return peptide[0] in \"SIQVL\"\n\n    def is_hit2(n_flank, c_flank, peptide):\n        return peptide[-1] in \"YIPASD\"\n\n    hypers = {\n        \"convolutional_kernel_size\": 1,\n        \"flanking_averages\": False,\n    }\n\n    train_basic_network(num=10000, is_hit=is_hit1, **hypers)\n    train_basic_network(num=10000, is_hit=is_hit2, **hypers)\n    train_basic_network(num=10000, is_hit=is_hit, **hypers)\n\n\ndef train_basic_network(num, do_assertions=True, is_hit=None, **hyperparameters):\n    \"\"\"Train a processing network and check performance.\"\"\"\n    use_hyperparameters = {\n        \"max_epochs\": 100,\n        \"peptide_max_length\": 12,\n        \"n_flank_length\": 8,\n        \"c_flank_length\": 8,\n        \"convolutional_kernel_size\": 3,\n        \"flanking_averages\": False,  # Use False for reliable convergence\n        \"min_delta\": 0.01,\n    }\n    use_hyperparameters.update(hyperparameters)\n\n    df = pandas.DataFrame({\n        \"n_flank\": random_peptides(int(num / 2), 10) + random_peptides(int(num / 2), 1),\n        \"c_flank\": random_peptides(num, 10),\n        \"peptide\": random_peptides(int(num / 2), 11) + random_peptides(int(num / 2), 8),\n    }).sample(frac=1.0)\n\n    if is_hit is None:\n        n_cleavage_regex = \"[AILQSV][SINFEKLH][MNPQYK]\"\n\n        def is_hit(n_flank, c_flank, peptide):\n            if re.search(n_cleavage_regex, peptide):\n                return False  # peptide is cleaved\n            return bool(re.match(n_cleavage_regex, n_flank[-1:] + peptide))\n\n    df[\"hit\"] = [\n        is_hit(row.n_flank, row.c_flank, row.peptide)\n        for (_, row) in df.iterrows()\n    ]\n\n    train_df = df.sample(frac=0.9).copy()\n    test_df = df.loc[~df.index.isin(train_df.index)].copy()\n\n    print(\n        \"Generated dataset\",\n        len(df),\n        \"hits: \",\n        df.hit.sum(),\n        \"frac:\",\n        df.hit.mean())\n\n    network = Class1ProcessingNeuralNetwork(**use_hyperparameters)\n    network.fit(\n        sequences=FlankingEncoding(\n            peptides=train_df.peptide.values,\n            n_flanks=train_df.n_flank.values,\n            c_flanks=train_df.c_flank.values),\n        targets=train_df.hit.values,\n        verbose=0)\n\n    print(network.network())\n\n    for df_subset in [train_df, test_df]:\n        df_subset[\"predictions\"] = network.predict(\n            df_subset.peptide.values,\n            df_subset.n_flank.values,\n            df_subset.c_flank.values)\n\n    train_auc = roc_auc_score(train_df.hit.values, 
train_df.predictions.values)\n    test_auc = roc_auc_score(test_df.hit.values, test_df.predictions.values)\n\n    print(\"Train auc\", train_auc)\n    print(\"Test auc\", test_auc)\n\n    if do_assertions:\n        assert train_auc > 0.9\n        assert test_auc > 0.85\n\n    return network\n\n\ndef test_serialization():\n    \"\"\"Test that network weights can be serialized and deserialized.\"\"\"\n    hyperparameters = {\n        \"max_epochs\": 10,\n        \"peptide_max_length\": 12,\n        \"n_flank_length\": 5,\n        \"c_flank_length\": 5,\n    }\n\n    # Generate training data\n    peptides = random_peptides(100, length=9)\n    n_flanks = random_peptides(100, length=10)\n    c_flanks = random_peptides(100, length=10)\n    targets = numpy.random.choice([0.0, 1.0], 100)\n\n    # Train a network\n    network = Class1ProcessingNeuralNetwork(**hyperparameters)\n    flanking = FlankingEncoding(peptides, n_flanks, c_flanks)\n    network.fit(flanking, targets, verbose=0)\n\n    # Get predictions before serialization\n    preds_before = network.predict_encoded(flanking)\n\n    # Serialize and deserialize\n    config = network.get_config()\n    weights = network.get_weights()\n\n    network2 = Class1ProcessingNeuralNetwork.from_config(config, weights=weights)\n    preds_after = network2.predict_encoded(flanking)\n\n    # Predictions should be close (some small differences may occur due to dropout eval mode)\n    numpy.testing.assert_allclose(preds_before, preds_after, rtol=1e-4)\n\n\ndef test_different_peptide_lengths():\n    \"\"\"Test that different peptide lengths are handled correctly.\"\"\"\n    hyperparameters = {\n        \"max_epochs\": 10,\n        \"peptide_max_length\": 15,\n        \"n_flank_length\": 5,\n        \"c_flank_length\": 5,\n    }\n\n    # Mix of peptide lengths\n    peptides = (\n        random_peptides(30, length=8) +\n        random_peptides(30, length=9) +\n        random_peptides(30, length=10) +\n        random_peptides(10, length=11)\n    )\n    n_flanks = random_peptides(100, length=10)\n    c_flanks = random_peptides(100, length=10)\n    targets = numpy.random.choice([0.0, 1.0], 100)\n\n    network = Class1ProcessingNeuralNetwork(**hyperparameters)\n    flanking = FlankingEncoding(peptides, n_flanks, c_flanks)\n    network.fit(flanking, targets, verbose=0)\n\n    predictions = network.predict_encoded(flanking)\n    assert len(predictions) == len(peptides)\n\n\ndef test_empty_flanks():\n    \"\"\"Test that empty flanking sequences are handled correctly.\"\"\"\n    hyperparameters = {\n        \"max_epochs\": 10,\n        \"peptide_max_length\": 12,\n        \"n_flank_length\": 5,\n        \"c_flank_length\": 5,\n    }\n\n    peptides = random_peptides(50, length=9)\n    n_flanks = [\"\"] * 50\n    c_flanks = [\"\"] * 50\n    targets = numpy.random.choice([0.0, 1.0], 50)\n\n    network = Class1ProcessingNeuralNetwork(**hyperparameters)\n    flanking = FlankingEncoding(peptides, n_flanks, c_flanks)\n    network.fit(flanking, targets, verbose=0)\n\n    predictions = network.predict_encoded(flanking)\n    assert len(predictions) == len(peptides)\n    assert numpy.isfinite(predictions).all()\n\n\ndef test_prediction_range():\n    \"\"\"Test that predictions are in the expected range [0, 1].\"\"\"\n    hyperparameters = {\n        \"max_epochs\": 20,\n        \"peptide_max_length\": 12,\n        \"n_flank_length\": 5,\n        \"c_flank_length\": 5,\n    }\n\n    peptides = random_peptides(100, length=9)\n    n_flanks = random_peptides(100, length=10)\n    
c_flanks = random_peptides(100, length=10)\n    targets = numpy.random.choice([0.0, 1.0], 100)\n\n    network = Class1ProcessingNeuralNetwork(**hyperparameters)\n    flanking = FlankingEncoding(peptides, n_flanks, c_flanks)\n    network.fit(flanking, targets, verbose=0)\n\n    predictions = network.predict_encoded(flanking)\n\n    # Predictions should be between 0 and 1 (sigmoid output)\n    assert predictions.min() >= 0\n    assert predictions.max() <= 1\n"
  },
  {
    "path": "test/test_class1_processing_predictor.py",
    "content": "\nimport pandas\nimport tempfile\nimport pickle\n\nfrom numpy.testing import assert_array_equal\n\nfrom mhcflurry.class1_processing_predictor import Class1ProcessingPredictor\n\nfrom mhcflurry.common import random_peptides\n\n\nfrom .test_class1_processing_neural_network import train_basic_network\n\nAFFINITY_PREDICTOR = None\n\ndef setup():\n    pass\n\n\ndef teardown():\n    pass\n\n\ndef test_basic():\n    network = train_basic_network(num=10000, do_assertions=False, max_epochs=10)\n    predictor = Class1ProcessingPredictor(models=[network])\n\n    num=10000\n    df = pandas.DataFrame({\n        \"n_flank\": random_peptides(num, 10),\n        \"c_flank\": random_peptides(num, 10),\n        \"peptide\": random_peptides(num, 9),\n    })\n    df[\"score\"] = predictor.predict(df.peptide, df.n_flank, df.c_flank)\n\n    # Test predictions are deterministic\n    df1b = predictor.predict_to_dataframe(\n        peptides=df.peptide.values,\n        n_flanks=df.n_flank.values,\n        c_flanks=df.c_flank.values)\n    assert_array_equal(df.score.values, df1b.score.values)\n\n    # Test saving and loading\n    models_dir = tempfile.mkdtemp(\"_models\")\n    print(models_dir)\n    predictor.save(models_dir)\n    predictor2 = Class1ProcessingPredictor.load(models_dir)\n\n    df2 = predictor2.predict_to_dataframe(\n        peptides=df.peptide.values,\n        n_flanks=df.n_flank.values,\n        c_flanks=df.c_flank.values)\n    assert_array_equal(df.score.values, df2.score.values)\n\n    # Test pickling\n    predictor3 = pickle.loads(\n        pickle.dumps(predictor, protocol=pickle.HIGHEST_PROTOCOL))\n    df3 = predictor3.predict_to_dataframe(\n        peptides=df.peptide.values,\n        n_flanks=df.n_flank.values,\n        c_flanks=df.c_flank.values)\n    assert_array_equal(df.score.values, df3.score.values)\n\n"
  },
  {
    "path": "test/test_custom_loss.py",
    "content": "\"\"\"\nTests for custom loss functions.\n\"\"\"\nimport pytest\n\nfrom .pytest_helpers import assert_greater, assert_almost_equal\n\nimport numpy\nimport torch\nfrom mhcflurry.custom_loss import CUSTOM_LOSSES, MultiallelicMassSpecLoss\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    \"\"\"Setup and teardown for each test.\"\"\"\n    startup()\n    yield\n    cleanup()\n\n\ndef evaluate_loss(loss_obj, y_true, y_pred):\n    \"\"\"Evaluate a loss function with PyTorch tensors.\"\"\"\n    y_true = torch.tensor(y_true, dtype=torch.float32)\n    y_pred = torch.tensor(y_pred, dtype=torch.float32)\n\n    if y_pred.ndim == 1:\n        y_pred = y_pred.reshape(len(y_pred), 1)\n    if y_true.ndim == 1:\n        y_true = y_true.reshape(len(y_true), 1)\n\n    print(\"y_pred, y_true:\", y_pred, y_true)\n\n    assert y_true.ndim == 2\n    assert y_pred.ndim == 2\n\n    result = loss_obj.loss(y_pred, y_true)\n    return result.item()\n\n\ndef test_mse_with_inequalities(loss_obj=None):\n    \"\"\"Test MSE with inequalities loss function.\"\"\"\n    if loss_obj is None:\n        loss_obj = CUSTOM_LOSSES['mse_with_inequalities']\n\n    y_values = [0.0, 0.5, 0.8, 1.0]\n\n    adjusted_y = loss_obj.encode_y(y_values)\n    print(adjusted_y)\n    loss0 = evaluate_loss(loss_obj, adjusted_y, y_values)\n    print(loss0)\n    assert abs(loss0) < 1e-6, f\"Expected 0, got {loss0}\"\n\n    adjusted_y = loss_obj.encode_y(y_values, [\">\", \">\", \">\", \">\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, y_values)\n    assert abs(loss0) < 1e-6, f\"Expected 0, got {loss0}\"\n\n    adjusted_y = loss_obj.encode_y(y_values, [\"<\", \"<\", \"<\", \"<\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, y_values)\n    assert abs(loss0) < 1e-6, f\"Expected 0, got {loss0}\"\n\n    adjusted_y = loss_obj.encode_y(y_values, [\"=\", \"<\", \"=\", \">\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, y_values)\n    assert abs(loss0) < 1e-6, f\"Expected 0, got {loss0}\"\n\n    adjusted_y = loss_obj.encode_y(y_values, [\"=\", \"<\", \"=\", \">\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, [0.0, 0.4, 0.8, 1.0])\n    assert abs(loss0) < 1e-6, f\"Expected 0, got {loss0}\"\n\n    adjusted_y = loss_obj.encode_y(y_values, [\">\", \"<\", \">\", \">\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, [0.1, 0.4, 0.9, 1.0])\n    assert abs(loss0) < 1e-6, f\"Expected 0, got {loss0}\"\n\n    adjusted_y = loss_obj.encode_y(y_values, [\">\", \"<\", \">\", \">\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, [0.1, 0.6, 0.9, 1.0])\n    assert_greater(loss0, 0.0)\n\n    adjusted_y = loss_obj.encode_y(y_values, [\"=\", \"<\", \">\", \">\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, [0.1, 0.6, 0.9, 1.0])\n    assert_almost_equal(loss0, 0.02 / 4, places=5)\n\n    adjusted_y = loss_obj.encode_y(y_values, [\"=\", \"<\", \"=\", \">\"])\n    loss0 = evaluate_loss(loss_obj, adjusted_y, [0.1, 0.6, 0.9, 1.0])\n    assert_almost_equal(loss0, 0.03 / 4, places=5)\n\n\ndef test_mse_with_inequalities_and_multiple_outputs():\n    \"\"\"Test MSE with inequalities and multiple outputs loss function.\"\"\"\n    loss_obj = CUSTOM_LOSSES['mse_with_inequalities_and_multiple_outputs']\n    test_mse_with_inequalities(loss_obj)\n\n    y_values = [0.0, 0.5, 0.8, 1.0]\n    adjusted_y = loss_obj.encode_y(\n        y_values, output_indices=[0, 1, 1, 1])\n    
loss0 = evaluate_loss(\n        loss_obj,\n        adjusted_y,\n        [\n            [0.0, 1000],\n            [2000, 0.5],\n            [3000, 0.8],\n            [4000, 1.0],\n        ])\n    assert_almost_equal(loss0, 0.0, places=5)\n\n    y_values = [0.0, 0.5, 0.8, 1.0]\n    adjusted_y = loss_obj.encode_y(\n        y_values, output_indices=[0, 1, 1, 0])\n    loss0 = evaluate_loss(\n        loss_obj,\n        adjusted_y,\n        [\n            [0.1, 1000],\n            [2000, 0.6],\n            [3000, 0.8],\n            [1.0, 4000],\n        ])\n    assert_almost_equal(loss0, 0.02 / 4, places=5)\n\n    y_values = [0.0, 0.5, 0.8, 1.0]\n    adjusted_y = loss_obj.encode_y(\n        y_values, output_indices=[0, 1, 1, 0], inequalities=[\"=\", \">\", \"<\", \"<\"])\n    loss0 = evaluate_loss(\n        loss_obj,\n        adjusted_y,\n        [\n            [0.1, 1000],\n            [2000, 0.6],\n            [3000, 0.8],\n            [1.0, 4000],\n        ])\n    assert_almost_equal(loss0, 0.01 / 4, places=5)\n\n    y_values = [0.0, 0.5, 0.8, 1.0]\n    adjusted_y = loss_obj.encode_y(\n        y_values, output_indices=[0, 1, 1, 0], inequalities=[\"=\", \"<\", \"<\", \"<\"])\n    loss0 = evaluate_loss(\n        loss_obj,\n        adjusted_y,\n        [\n            [0.1, 1000],\n            [2000, 0.6],\n            [3000, 0.8],\n            [1.0, 4000],\n        ])\n    assert_almost_equal(loss0, 0.02 / 4, places=5)\n\n\ndef test_multiallelic_mass_spec_loss():\n    \"\"\"Test multiallelic mass spec loss function.\"\"\"\n    for delta in [0.0, 0.3]:\n        print(\"delta\", delta)\n        # Hit labels\n        y_true = [\n            1.0,\n            0.0,\n            1.0,\n            -1.0,  # ignored\n            1.0,\n            0.0,\n            1.0,\n        ]\n        y_true = numpy.array(y_true)\n        y_pred = [\n            [0.3, 0.7, 0.5],\n            [0.2, 0.4, 0.6],\n            [0.1, 0.5, 0.3],\n            [0.9, 0.1, 0.2],\n            [0.1, 0.7, 0.1],\n            [0.8, 0.2, 0.4],\n            [0.1, 0.2, 0.4],\n        ]\n        y_pred = numpy.array(y_pred)\n\n        # reference implementation 1\n\n        def smooth_max(x, alpha):\n            x = numpy.array(x)\n            alpha = numpy.array([alpha])\n            return (x * numpy.exp(x * alpha)).sum() / (\n                numpy.exp(x * alpha)).sum()\n\n        contributions = []\n        for i in range(len(y_true)):\n            if y_true[i] == 1.0:\n                for j in range(len(y_true)):\n                    if y_true[j] == 0.0:\n                        tightest_i = max(y_pred[i])\n                        for k in range(y_pred.shape[1]):\n                            contribution = max(\n                                0, y_pred[j, k] - tightest_i + delta)**2\n                            contributions.append(contribution)\n        contributions = numpy.array(contributions)\n        expected1 = contributions.sum() / len(contributions)\n\n        # reference implementation 2: numpy\n        pos = numpy.array([\n            max(y_pred[i])\n            for i in range(len(y_pred))\n            if y_true[i] == 1.0\n        ])\n\n        neg = y_pred[(y_true == 0.0).astype(bool)]\n        term = neg.reshape((-1, 1)) - pos + delta\n        expected2 = (\n                numpy.maximum(0, term)**2).sum() / (\n            len(pos) * neg.shape[0] * neg.shape[1])\n\n        numpy.testing.assert_almost_equal(expected1, expected2)\n\n        loss_obj = MultiallelicMassSpecLoss(delta=delta)\n        computed = 
evaluate_loss(\n            loss_obj,\n            y_true,\n            y_pred.reshape(y_pred.shape))\n\n        numpy.testing.assert_almost_equal(computed, expected1, 4)\n\n\ndef test_encode_y_basic():\n    \"\"\"Test basic y encoding functionality.\"\"\"\n    from mhcflurry.pytorch_losses import MSEWithInequalities\n\n    # Test equality encoding\n    y = [0.0, 0.5, 1.0]\n    encoded = MSEWithInequalities.encode_y(y)\n    numpy.testing.assert_array_equal(encoded, y)\n\n    # Test greater than encoding (should add 2)\n    encoded_gt = MSEWithInequalities.encode_y(y, [\">\", \">\", \">\"])\n    numpy.testing.assert_array_equal(encoded_gt, [2.0, 2.5, 3.0])\n\n    # Test less than encoding (should add 4)\n    encoded_lt = MSEWithInequalities.encode_y(y, [\"<\", \"<\", \"<\"])\n    numpy.testing.assert_array_equal(encoded_lt, [4.0, 4.5, 5.0])\n\n\ndef test_loss_gradient_flow():\n    \"\"\"Test that gradients flow correctly through the loss.\"\"\"\n    from mhcflurry.pytorch_losses import MSEWithInequalities\n\n    loss_fn = MSEWithInequalities()\n\n    # Create predictions that require gradients\n    y_pred = torch.tensor([[0.5]], requires_grad=True)\n    y_true = torch.tensor([[0.3]])  # equality\n\n    loss = loss_fn(y_pred, y_true)\n    loss.backward()\n\n    # Gradient should exist and be non-zero\n    assert y_pred.grad is not None\n    assert y_pred.grad.abs().item() > 0\n\n\ndef test_inequality_gradient_respects_constraint():\n    \"\"\"Test that gradients respect inequality constraints.\"\"\"\n    from mhcflurry.pytorch_losses import MSEWithInequalities\n\n    loss_fn = MSEWithInequalities()\n\n    # Test greater-than constraint (y_true encoded as 2 + value)\n    # When pred > threshold, gradient should be 0\n    y_pred = torch.tensor([[0.7]], requires_grad=True)\n    y_true = torch.tensor([[2.5]])  # > 0.5\n\n    loss = loss_fn(y_pred, y_true)\n    loss.backward()\n\n    # Gradient should be 0 since pred (0.7) > threshold (0.5)\n    assert abs(y_pred.grad.item()) < 1e-6\n\n    # When pred < threshold, gradient should be non-zero\n    y_pred2 = torch.tensor([[0.3]], requires_grad=True)\n    y_true2 = torch.tensor([[2.5]])  # > 0.5\n\n    loss2 = loss_fn(y_pred2, y_true2)\n    loss2.backward()\n\n    # Gradient should be non-zero since pred (0.3) < threshold (0.5)\n    assert abs(y_pred2.grad.item()) > 0\n"
  },
  {
    "path": "test/test_doctest.py",
    "content": "\"\"\"\nRun doctests.\n\"\"\"\n\nimport os\nimport doctest\n\nimport pandas\nimport pytest\n\n\nimport mhcflurry\nimport mhcflurry.class1_presentation_predictor\n\nos.environ[\"CUDA_VISIBLE_DEVICES\"] = \"\"\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\n\ndef test_doctests():\n    original_precision = pandas.get_option('display.precision')\n    pandas.set_option('display.precision', 3)\n\n    doctest.testmod(mhcflurry)\n    doctest.testmod(mhcflurry.class1_presentation_predictor)\n\n    # Disabling for now until we figure out how to deal with numerical precision\n    # for predictions.\n    # assert results1.failed == 0, results1.failed\n    # assert results2.failed == 0, results2.failed\n\n    pandas.set_option('display.precision', original_precision)\n"
  },
  {
    "path": "test/test_download_models_class1.py",
    "content": "import numpy\nimport pickle\nimport tempfile\nimport pytest\n\nfrom mhcflurry import Class1AffinityPredictor, Class1NeuralNetwork\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\nnumpy.random.seed(0)\n\nSERIALIZATION_RTOL = 1e-6\n\nDOWNLOADED_PREDICTOR = None\n\n\ndef setup_module():\n    global DOWNLOADED_PREDICTOR\n    startup()\n    DOWNLOADED_PREDICTOR = Class1AffinityPredictor.load()\n\n\ndef teardown_module():\n    global DOWNLOADED_PREDICTOR\n    DOWNLOADED_PREDICTOR = None\n    cleanup()\n\n\n@pytest.fixture(scope=\"module\")\ndef downloaded_predictor():\n    return DOWNLOADED_PREDICTOR\n\n\ndef predict_and_check(\n        downloaded_predictor,\n        allele,\n        peptide,\n        expected_range=(0, 500)):\n\n    print(\"\\n%s\" % (\n        downloaded_predictor.predict_to_dataframe(\n            peptides=[peptide],\n            allele=allele,\n            include_individual_model_predictions=True)))\n\n    (prediction,) = downloaded_predictor.predict(allele=allele, peptides=[peptide])\n    assert prediction >= expected_range[0], (downloaded_predictor, prediction)\n    assert prediction <= expected_range[1], (downloaded_predictor, prediction)\n\n\ndef test_a1_titin_epitope_downloaded_models(downloaded_predictor):\n    # Test the A1 Titin epitope ESDPIVAQY from\n    #   Identification of a Titin-Derived HLA-A1-Presented Peptide\n    #   as a Cross-Reactive Target for Engineered MAGE A3-Directed\n    #   T Cells\n    predict_and_check(downloaded_predictor, \"HLA-A*01:01\", \"ESDPIVAQY\")\n\n\ndef test_a1_mage_epitope_downloaded_models(downloaded_predictor):\n    # Test the A1 MAGE epitope EVDPIGHLY from\n    #   Identification of a Titin-Derived HLA-A1-Presented Peptide\n    #   as a Cross-Reactive Target for Engineered MAGE A3-Directed\n    #   T Cells\n    predict_and_check(downloaded_predictor, \"HLA-A*01:01\", \"EVDPIGHLY\")\n\n\ndef test_a2_hiv_epitope_downloaded_models(downloaded_predictor):\n    # Test the A2 HIV epitope SLYNTVATL from\n    #    The HIV-1 HLA-A2-SLYNTVATL Is a Help-Independent CTL Epitope\n    predict_and_check(downloaded_predictor, \"HLA-A*02:01\", \"SLYNTVATL\")\n\n\ndef test_caching(downloaded_predictor):\n    if not downloaded_predictor.allele_to_sequence:\n        # Only run this test on allele-specific predictors.\n        Class1NeuralNetwork.KERAS_MODELS_CACHE.clear()\n        downloaded_predictor.predict(\n            peptides=[\"SIINFEKL\"],\n            allele=\"HLA-A*02:01\")\n        num_cached = len(Class1NeuralNetwork.KERAS_MODELS_CACHE)\n        assert num_cached > 0\n\n\ndef test_downloaded_predictor_is_serializable(downloaded_predictor):\n    predictor_copy = pickle.loads(pickle.dumps(downloaded_predictor))\n    # Optimized pan-model round-trips can differ at float-noise level across\n    # platforms, so require a tight numerical match rather than bitwise\n    # equality.\n    numpy.testing.assert_allclose(\n        downloaded_predictor.predict(\n            [\"RSKERAVVVAW\"], allele=\"HLA-A*01:01\")[0],\n        predictor_copy.predict(\n            [\"RSKERAVVVAW\"], allele=\"HLA-A*01:01\")[0],\n        rtol=SERIALIZATION_RTOL,\n        atol=0.0,\n    )\n\n\ndef test_downloaded_predictor_is_savable(downloaded_predictor):\n    models_dir = tempfile.mkdtemp(\"_models\")\n    print(models_dir)\n    downloaded_predictor.save(models_dir)\n    predictor_copy = Class1AffinityPredictor.load(models_dir)\n\n    numpy.testing.assert_allclose(\n        downloaded_predictor.predict(\n            [\"RSKERAVVVAW\"], 
allele=\"HLA-A*01:01\")[0],\n        predictor_copy.predict(\n            [\"RSKERAVVVAW\"], allele=\"HLA-A*01:01\")[0],\n        rtol=SERIALIZATION_RTOL,\n        atol=0.0,\n    )\n\n\ndef test_downloaded_predictor_gives_percentile_ranks(downloaded_predictor):\n    predictions = downloaded_predictor.predict_to_dataframe(\n        peptides=[\"SAQGQFSAV\", \"SAQGQFSAV\"],\n        alleles=[\"HLA-A*03:01\", \"HLA-C*01:02\"])\n\n    print(predictions)\n    assert not predictions.prediction.isnull().any()\n    assert not predictions.prediction_percentile.isnull().any()\n"
  },
  {
    "path": "test/test_ensemble_centrality.py",
    "content": "\nimport numpy\nimport warnings\n\nfrom numpy.testing import assert_equal\n\nfrom mhcflurry import ensemble_centrality\n\n\ndef test_robust_mean():\n    arr1 = numpy.array([\n        [1, 2, 3, 4, 5],\n        [-10000, 2, 3, 4, 100],\n    ])\n\n    results = ensemble_centrality.robust_mean(arr1)\n    assert_equal(results, [3, 3])\n\n    # Should ignore nans.\n    arr2 = numpy.array([\n        [1, 2, 3, 4, 5],\n        [numpy.nan, 1, 2, 3, numpy.nan],\n        [numpy.nan, numpy.nan, numpy.nan, numpy.nan, numpy.nan],\n    ])\n\n    results = ensemble_centrality.CENTRALITY_MEASURES[\"robust_mean\"](arr2)\n    assert_equal(results, [3, 2, numpy.nan])\n\n    results = ensemble_centrality.CENTRALITY_MEASURES[\"mean\"](arr2)\n    assert_equal(results, [3, 2, numpy.nan])\n\n\ndef test_no_runtime_warnings_for_all_nan_rows():\n    arr = numpy.array([\n        [numpy.nan, numpy.nan, numpy.nan],\n        [1.0, 2.0, numpy.nan],\n    ])\n    with warnings.catch_warnings():\n        warnings.simplefilter(\"error\", RuntimeWarning)\n        mean = ensemble_centrality.CENTRALITY_MEASURES[\"mean\"](arr)\n        median = ensemble_centrality.CENTRALITY_MEASURES[\"median\"](arr)\n        robust = ensemble_centrality.CENTRALITY_MEASURES[\"robust_mean\"](arr)\n    assert numpy.isnan(mean[0]) and mean[1] == 1.5\n    assert numpy.isnan(median[0]) and median[1] == 1.5\n    assert numpy.isnan(robust[0]) and robust[1] == 1.5\n"
  },
  {
    "path": "test/test_hyperparameters.py",
    "content": "\n\nfrom mhcflurry.class1_neural_network import Class1NeuralNetwork\n\n\ndef test_all_combinations_of_hyperparameters():\n    combinations_dict = dict(\n        activation=[\"tanh\", \"sigmoid\"],\n        random_negative_constant=[0, 20])\n    results = (\n        Class1NeuralNetwork\n        .hyperparameter_defaults\n        .models_grid(**combinations_dict))\n    assert len(results) == 4\n\nif __name__ == \"__main__\":\n    test_all_combinations_of_hyperparameters()\n"
  },
  {
    "path": "test/test_local_parallelism.py",
    "content": "from argparse import ArgumentParser\n\nimport pytest\n\nfrom mhcflurry.local_parallelism import (\n    add_local_parallelism_args,\n    validate_worker_pool_args,\n    worker_init_kwargs_for_scheduler,\n)\n\n\ndef test_worker_init_kwargs_round_robin_across_gpus():\n    assert worker_init_kwargs_for_scheduler(\n        num_jobs=5,\n        num_gpus=2,\n        backend=\"auto\",\n        max_workers_per_gpu=2,\n    ) == [\n        {\"backend\": \"gpu\", \"gpu_device_nums\": [0]},\n        {\"backend\": \"gpu\", \"gpu_device_nums\": [1]},\n        {\"backend\": \"gpu\", \"gpu_device_nums\": [0]},\n        {\"backend\": \"gpu\", \"gpu_device_nums\": [1]},\n        {\"backend\": \"cpu\", \"gpu_device_nums\": []},\n    ]\n\n\ndef test_worker_init_kwargs_without_gpu_scheduling_uses_backend():\n    assert worker_init_kwargs_for_scheduler(\n        num_jobs=3,\n        num_gpus=0,\n        backend=\"mps\",\n        max_workers_per_gpu=2,\n    ) == [\n        {\"backend\": \"mps\"},\n        {\"backend\": \"mps\"},\n        {\"backend\": \"mps\"},\n    ]\n\n\ndef test_worker_init_kwargs_normalizes_default_backend_alias():\n    assert worker_init_kwargs_for_scheduler(\n        num_jobs=2,\n        num_gpus=0,\n        backend=\"default\",\n        max_workers_per_gpu=2,\n    ) == [\n        {\"backend\": \"auto\"},\n        {\"backend\": \"auto\"},\n    ]\n\n\ndef test_worker_init_kwargs_with_gpus_normalizes_default_backend_alias():\n    assert worker_init_kwargs_for_scheduler(\n        num_jobs=3,\n        num_gpus=1,\n        backend=\"default\",\n        max_workers_per_gpu=2,\n    ) == [\n        {\"backend\": \"gpu\", \"gpu_device_nums\": [0]},\n        {\"backend\": \"gpu\", \"gpu_device_nums\": [0]},\n        {\"backend\": \"cpu\", \"gpu_device_nums\": []},\n    ]\n\n\ndef test_backend_default_alias_parses():\n    parser = ArgumentParser()\n    add_local_parallelism_args(parser)\n    args = parser.parse_args([\"--backend\", \"default\"])\n    assert args.backend == \"default\"\n\n\ndef test_validate_worker_pool_args_requires_parallelism_for_gpus():\n    with pytest.raises(ValueError, match=\"num_jobs > 0\"):\n        validate_worker_pool_args(\n            num_jobs=0,\n            num_gpus=1,\n            backend=\"auto\",\n            max_workers_per_gpu=1,\n        )\n\n\ndef test_validate_worker_pool_args_rejects_non_cuda_backends_for_gpus():\n    with pytest.raises(ValueError, match=\"backend 'auto' or 'gpu'\"):\n        validate_worker_pool_args(\n            num_jobs=2,\n            num_gpus=1,\n            backend=\"mps\",\n            max_workers_per_gpu=1,\n        )\n\n\ndef test_validate_worker_pool_args_rejects_invalid_backend():\n    with pytest.raises(ValueError, match=\"Invalid backend\"):\n        validate_worker_pool_args(\n            num_jobs=2,\n            num_gpus=0,\n            backend=\"gpuu\",\n            max_workers_per_gpu=1,\n        )\n"
  },
  {
    "path": "test/test_master_compat_predictions.py",
    "content": "\"\"\"\nRegression tests for individual neural network architectures.\n\nEach test case bundles a small model config, weights, and expected\npredictions under test/data/.  These expected values were generated by\nthe TF/Keras implementation and verify that the PyTorch code produces\nequivalent outputs for each supported architecture variant (affinity,\npan-allele multiply/concat, densenet, multi-output).\n\"\"\"\nimport json\nimport os\n\nimport numpy as np\nimport pytest\nimport torch\n\nfrom mhcflurry.allele_encoding import AlleleEncoding\nfrom mhcflurry.class1_neural_network import Class1NeuralNetwork\nfrom mhcflurry.common import configure_pytorch, load_weights\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\nMODEL_NAMES = [\n    \"master_affinity\",\n    \"master_pan_multiply\",\n    \"master_pan_concat\",\n    \"master_densenet\",\n    \"master_multi_output\",\n]\n\nDATA_DIR = os.path.join(os.path.dirname(__file__), \"data\")\n\n\ndef setup_module():\n    startup()\n\n\ndef teardown_module():\n    cleanup()\n\n\ndef _load_model_and_expected(name):\n    prefix = name + \"_fixture\"\n    with open(os.path.join(DATA_DIR, f\"{prefix}_config.json\"), \"r\") as f:\n        config = json.load(f)\n    weights = load_weights(os.path.join(DATA_DIR, f\"{prefix}_weights.npz\"))\n    with open(os.path.join(DATA_DIR, f\"{prefix}_predictions.json\"), \"r\") as f:\n        expected = json.load(f)\n    return config, weights, expected\n\n\ndef _predict(config, weights, expected, backend=None):\n    if backend is not None:\n        configure_pytorch(backend=backend)\n    Class1NeuralNetwork.clear_model_cache()\n\n    model = Class1NeuralNetwork.from_config(config, weights=weights)\n    peptides = expected[\"peptides\"]\n\n    if \"alleles\" in expected:\n        allele_encoding = AlleleEncoding(\n            alleles=expected[\"alleles\"],\n            allele_to_sequence=expected[\"allele_to_sequence\"],\n        )\n    else:\n        allele_encoding = None\n\n    return model.predict(peptides, allele_encoding=allele_encoding)\n\n\n@pytest.mark.parametrize(\"model_name\", MODEL_NAMES)\ndef test_predictions_match_expected(model_name):\n    config, weights, expected = _load_model_and_expected(model_name)\n    predicted = _predict(config, weights, expected)\n\n    np.testing.assert_allclose(\n        predicted,\n        np.array(expected[\"predictions\"], dtype=np.float64),\n        rtol=0.01,\n        atol=0.0,\n    )\n\n\n@pytest.mark.parametrize(\"model_name\", MODEL_NAMES)\ndef test_mps_matches_cpu(model_name):\n    if not (hasattr(torch.backends, \"mps\") and torch.backends.mps.is_available()):\n        pytest.skip(\"MPS is not available\")\n\n    config, weights, expected = _load_model_and_expected(model_name)\n    predicted_cpu = np.asarray(\n        _predict(config, weights, expected, backend=\"cpu\"),\n        dtype=np.float64,\n    )\n    predicted_mps = np.asarray(\n        _predict(config, weights, expected, backend=\"mps\"),\n        dtype=np.float64,\n    )\n\n    np.testing.assert_allclose(predicted_mps, predicted_cpu, rtol=1e-5, atol=1e-2)\n"
  },
  {
    "path": "test/test_multi_output.py",
    "content": "import numpy\nimport pandas\nimport pytest\n\nfrom mhcflurry.class1_neural_network import Class1NeuralNetwork\nfrom mhcflurry.common import random_peptides\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\nnumpy.random.seed(0)\n\n@pytest.fixture(scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\n\ndef test_multi_output(setup_module):\n    hyperparameters = dict(\n        loss=\"custom:mse_with_inequalities_and_multiple_outputs\",\n        activation=\"tanh\",\n        layer_sizes=[16],\n        max_epochs=50,\n        minibatch_size=250,\n        random_negative_rate=0.0,\n        random_negative_constant=0.0,\n        early_stopping=False,\n        validation_split=0.0,\n        locally_connected_layers=[\n        ],\n        dense_layer_l1_regularization=0.0,\n        dropout_probability=0.0,\n        optimizer=\"adam\",\n        num_outputs=3)\n\n    df = pandas.DataFrame()\n    df[\"peptide\"] = random_peptides(10000, length=9)\n    df[\"output1\"] = df.peptide.map(lambda s: s[4] == 'K').astype(int) * 49000 + 1\n    df[\"output2\"] = df.peptide.map(lambda s: s[3] == 'Q').astype(int) * 49000 + 1\n    df[\"output3\"] = df.peptide.map(lambda s: s[4] == 'K' or s[3] == 'Q').astype(int) * 49000 + 1\n\n    print(\"output1 mean\", df.output1.mean())\n    print(\"output2 mean\", df.output2.mean())\n\n    stacked = df.set_index(\"peptide\").stack().reset_index()\n    stacked.columns = ['peptide', 'output_name', 'value']\n    stacked[\"output_index\"] = stacked.output_name.map({\n        \"output1\": 0,\n        \"output2\": 1,\n        \"output3\": 2,\n    })\n    assert not stacked.output_index.isnull().any(), stacked\n\n    fit_kwargs = {\n        'verbose': 1,\n    }\n\n    predictor = Class1NeuralNetwork(**hyperparameters)\n    stacked_train = stacked\n    predictor.fit(\n        stacked_train.peptide.values,\n        stacked_train.value.values,\n        output_indices=stacked_train.output_index.values,\n        **fit_kwargs)\n\n    result = predictor.predict(df.peptide.values, output_index=None)\n    print(df.shape, result.shape)\n    print(result)\n\n    df[\"prediction1\"] = result[:,0]\n    df[\"prediction2\"] = result[:,1]\n    df[\"prediction3\"] = result[:,2]\n\n    df_by_peptide = df.set_index(\"peptide\")\n\n    correlation = pandas.DataFrame(\n        numpy.corrcoef(df_by_peptide.T),\n        columns=df_by_peptide.columns,\n        index=df_by_peptide.columns)\n    print(correlation)\n\n    sub_correlation = correlation.loc[\n        [\"output1\", \"output2\", \"output3\"],\n        [\"prediction1\", \"prediction2\", \"prediction3\"],\n    ]\n    assert sub_correlation.iloc[0, 0] > 0.99, correlation\n    assert sub_correlation.iloc[1, 1] > 0.99, correlation\n    assert sub_correlation.iloc[2, 2] > 0.99, correlation\n"
  },
  {
    "path": "test/test_network_merging.py",
    "content": "\nimport numpy\nimport pandas\nimport pytest\n\nfrom mhcflurry import Class1AffinityPredictor, Class1NeuralNetwork\nfrom mhcflurry.common import random_peptides\nfrom mhcflurry.downloads import get_path\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\n\n\ndef setup_module():\n    global PAN_ALLELE_PREDICTOR\n    startup()\n    PAN_ALLELE_PREDICTOR = Class1AffinityPredictor.load(\n        get_path(\"models_class1_pan\", \"models.combined\"),\n        optimization_level=0,)\n\n\ndef teardown_module():\n    global PAN_ALLELE_PREDICTOR\n    PAN_ALLELE_PREDICTOR = None\n    cleanup()\n\n\n@pytest.fixture(scope=\"module\")\ndef predictors():\n    return {\"pan-allele\": PAN_ALLELE_PREDICTOR}\n\n\ndef test_merge(predictors):\n    pan_allele_predictor = predictors['pan-allele']\n\n    assert len(pan_allele_predictor.class1_pan_allele_models) > 1\n    peptides = random_peptides(100, length=9)\n    peptides.extend(random_peptides(100, length=10))\n    peptides = pandas.Series(peptides).sample(frac=1.0)\n\n    alleles = pandas.Series(\n        [\"HLA-A*03:01\", \"HLA-B*57:01\", \"HLA-C*02:01\"]\n    ).sample(n=len(peptides), replace=True)\n\n    predictions1 = pan_allele_predictor.predict(\n        peptides=peptides, alleles=alleles)\n\n    merged = Class1NeuralNetwork.merge(\n        pan_allele_predictor.class1_pan_allele_models)\n    merged_predictor = Class1AffinityPredictor(\n        allele_to_sequence=pan_allele_predictor.allele_to_sequence,\n        class1_pan_allele_models=[merged],\n    )\n    predictions2 = merged_predictor.predict(peptides=peptides, alleles=alleles)\n    numpy.testing.assert_allclose(predictions1, predictions2, atol=0.1)\n"
  },
  {
    "path": "test/test_percent_rank_transform.py",
    "content": "\nimport numpy\n\nfrom mhcflurry.percent_rank_transform import PercentRankTransform\n\nfrom numpy.testing import assert_allclose, assert_equal\n\n\ndef test_percent_rank_transform():\n    model = PercentRankTransform()\n    model.fit(numpy.arange(1000), bins=100)\n    assert_allclose(\n        model.transform([-2, 0, 50, 100, 2000]),\n        [0.0, 0.0, 5.0, 10.0, 100.0],\n        err_msg=str(model.__dict__))\n\n    model2 = PercentRankTransform.from_series(model.to_series())\n    assert_allclose(\n        model2.transform([-2, 0, 50, 100, 2000]),\n        [0.0, 0.0, 5.0, 10.0, 100.0],\n        err_msg=str(model.__dict__))\n\n    assert_equal(model.cdf, model2.cdf)\n    assert_equal(model.bin_edges, model2.bin_edges)\n\n"
  },
  {
    "path": "test/test_predict_command.py",
    "content": "\"\"\"Tests for the predict command.\"\"\"\nimport pytest\n\nimport tempfile\nimport os\n\nimport pandas\n\nimport torch\n\nfrom mhcflurry import predict_command\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\ntorch.manual_seed(1)\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    \"\"\"Setup and teardown for each test.\"\"\"\n    startup()\n    yield\n    cleanup()\n\n\nTEST_CSV = '''\nAllele,Peptide,Experiment\nHLA-A0201,SYNFEKKL,17\nHLA-B4403,AAAAAAAAA,17\nHLA-B4403,PPPPPPPP,18\n'''.strip()\n\n\n@pytest.mark.slow\ndef test_csv():\n    args = [\"--allele-column\", \"Allele\", \"--peptide-column\", \"Peptide\"]\n    deletes = []\n    try:\n        with tempfile.NamedTemporaryFile(delete=False, suffix=\".csv\") as fd:\n            fd.write(TEST_CSV.encode())\n            deletes.append(fd.name)\n        fd_out = tempfile.NamedTemporaryFile(delete=False, suffix=\".csv\")\n        deletes.append(fd_out.name)\n        full_args = [fd.name] + args + [\"--out\", fd_out.name]\n        print(\"Running with args: %s\" % full_args)\n        predict_command.run(full_args)\n        result = pandas.read_csv(fd_out.name)\n        print(result)\n        assert not result.isnull().any().any()\n    finally:\n        for delete in deletes:\n            os.unlink(delete)\n\n    assert result.shape == (3, 8)\n\n\n@pytest.mark.slow\ndef test_no_csv():\n    args = [\n        \"--alleles\", \"HLA-A0201\", \"H-2-Kb\",\n        \"--peptides\", \"SIINFEKL\", \"DENDREKLLL\", \"PICKLEEE\",\n        \"--prediction-column-prefix\", \"mhcflurry1_\",\n        \"--affinity-only\",\n    ]\n\n    deletes = []\n    try:\n        fd_out = tempfile.NamedTemporaryFile(delete=False, suffix=\".csv\")\n        deletes.append(fd_out.name)\n        full_args = args + [\"--out\", fd_out.name]\n        print(\"Running with args: %s\" % full_args)\n        predict_command.run(full_args)\n        result = pandas.read_csv(fd_out.name)\n        print(result)\n    finally:\n        for delete in deletes:\n            os.unlink(delete)\n\n    print(result)\n    assert len(result) == 6\n    sub_result1 = result.loc[result.peptide == \"SIINFEKL\"].set_index(\"allele\")\n    print(sub_result1)\n    assert (\n        sub_result1.loc[\"H-2-Kb\"].mhcflurry1_affinity <\n        sub_result1.loc[\"HLA-A0201\"].mhcflurry1_affinity)\n"
  },
  {
    "path": "test/test_predict_scan_command.py",
    "content": "\nimport tempfile\nimport os\n\nimport pandas\nimport pytest\nfrom numpy.testing import assert_array_less\n\nfrom mhcflurry import predict_scan_command\n\nfrom . import data_path\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\n\n\ndef read_output_csv(filename):\n    return pandas.read_csv(\n        filename,\n        converters={\"n_flank\": str, \"c_flank\": str})\n\n\ndef test_fasta():\n    args = [\n        data_path(\"example.fasta\"),\n        \"--alleles\",\n        \"HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:03,HLA-C*07:02\",\n    ]\n    deletes = []\n    try:\n        fd_out = tempfile.NamedTemporaryFile(delete=False, suffix=\".csv\")\n        deletes.append(fd_out.name)\n        full_args = args + [\"--out\", fd_out.name]\n        print(\"Running with args: %s\" % full_args)\n        predict_scan_command.run(full_args)\n        result = read_output_csv(fd_out.name)\n        print(result)\n        assert not result.isnull().any().any()\n    finally:\n        for delete in deletes:\n            os.unlink(delete)\n\n    assert (\n        result.best_allele.nunique() ==\n        6), str(list(result.best_allele.unique()))\n    assert result.sequence_name.nunique() == 3\n    assert_array_less(result.affinity_percentile, 2.0)\n\n\ndef test_fasta_50nm():\n    args = [\n        data_path(\"example.fasta\"),\n        \"--alleles\",\n        \"HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:02,HLA-C*07:02\",\n        \"--threshold-affinity\", \"50\",\n    ]\n    deletes = []\n    try:\n        fd_out = tempfile.NamedTemporaryFile(delete=False, suffix=\".csv\")\n        deletes.append(fd_out.name)\n        full_args = args + [\"--out\", fd_out.name]\n        print(\"Running with args: %s\" % full_args)\n        predict_scan_command.run(full_args)\n        result = read_output_csv(fd_out.name)\n        print(result)\n        assert not result.isnull().any().any()\n    finally:\n        for delete in deletes:\n            os.unlink(delete)\n\n    assert len(result) > 0\n    assert_array_less(result.affinity, 50.0001)\n\n\ndef test_fasta_percentile():\n    args = [\n        data_path(\"example.fasta\"),\n        \"--alleles\",\n        \"HLA-A*02:01,HLA-A*03:01,HLA-B*57:01,HLA-B*45:01,HLA-C*02:02,HLA-C*07:02\",\n        \"--threshold-affinity-percentile\", \"5.0\",\n    ]\n    deletes = []\n    try:\n        fd_out = tempfile.NamedTemporaryFile(delete=False, suffix=\".csv\")\n        deletes.append(fd_out.name)\n        full_args = args + [\"--out\", fd_out.name]\n        print(\"Running with args: %s\" % full_args)\n        predict_scan_command.run(full_args)\n        result = read_output_csv(fd_out.name)\n        print(result)\n        assert not result.isnull().any().any()\n    finally:\n        for delete in deletes:\n            os.unlink(delete)\n\n    assert len(result) > 0\n    assert_array_less(result.affinity_percentile, 5.0001)\n\n\ndef test_commandline_sequences():\n    args = [\n        \"--sequences\", \"ASDFGHKL\", \"QWERTYIPCVNM\",\n        \"--alleles\", \"HLA-A0201,HLA-A0301\", \"H-2-Kb\",\n        \"--peptide-lengths\", \"8\",\n        \"--results-all\",\n    ]\n\n    deletes = []\n    try:\n        fd_out = tempfile.NamedTemporaryFile(delete=False, suffix=\".csv\")\n        deletes.append(fd_out.name)\n        full_args = args + [\"--out\", fd_out.name]\n        print(\"Running with args: %s\" % full_args)\n        
predict_scan_command.run(full_args)\n        result = read_output_csv(fd_out.name)\n        print(result)\n    finally:\n        for delete in deletes:\n            os.unlink(delete)\n\n    print(result)\n\n    assert result.sequence_name.nunique() == 2\n    assert result.best_allele.nunique() == 3\n    assert result.sample_name.nunique() == 2\n    assert (result.peptide == \"ASDFGHKL\").sum() == 2\n    assert (result.peptide != \"ASDFGHKL\").sum() == 10\n"
  },
  {
    "path": "test/test_pytorch_coverage.py",
    "content": "\"\"\"\nUnit tests to increase coverage of PyTorch migration code.\n\nCovers:\n- pytorch_losses: get_pytorch_loss registry, StandardLoss, sample weights\n- pytorch_layers: get_activation, LocallyConnected1D numerics\n- ensemble_centrality: edge cases (<=3 cols, all-NaN, median)\n- class1_neural_network: weight init variants, MergedClass1NeuralNetwork,\n  skip-connections topology\n- class1_affinity_predictor: canonicalize_allele_name round-trip\n\"\"\"\nimport warnings\nimport numpy as np\nimport pytest\nimport torch\n\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    startup()\n    yield\n    cleanup()\n\n\n# ── pytorch_losses ──────────────────────────────────────────────────────────\n\n\nclass TestGetPytorchLoss:\n    def test_standard_mse(self):\n        from mhcflurry.pytorch_losses import get_pytorch_loss\n        loss = get_pytorch_loss(\"mse\")\n        assert not loss.supports_inequalities\n        pred = torch.tensor([0.5])\n        target = torch.tensor([0.3])\n        val = loss(pred, target).item()\n        assert abs(val - 0.04) < 1e-6\n\n    def test_standard_mae(self):\n        from mhcflurry.pytorch_losses import get_pytorch_loss\n        loss = get_pytorch_loss(\"mae\")\n        pred = torch.tensor([0.5])\n        target = torch.tensor([0.3])\n        val = loss(pred, target).item()\n        assert abs(val - 0.2) < 1e-6\n\n    def test_custom_loss_lookup(self):\n        from mhcflurry.pytorch_losses import get_pytorch_loss\n        loss = get_pytorch_loss(\"custom:mse_with_inequalities\")\n        assert loss.supports_inequalities\n        assert not loss.supports_multiple_outputs\n\n    def test_custom_multi_output_lookup(self):\n        from mhcflurry.pytorch_losses import get_pytorch_loss\n        loss = get_pytorch_loss(\"custom:mse_with_inequalities_and_multiple_outputs\")\n        assert loss.supports_inequalities\n        assert loss.supports_multiple_outputs\n\n    def test_custom_mass_spec_lookup(self):\n        from mhcflurry.pytorch_losses import get_pytorch_loss\n        loss = get_pytorch_loss(\"custom:multiallelic_mass_spec_loss\")\n        assert loss.supports_inequalities\n\n    def test_unknown_standard_loss_raises(self):\n        from mhcflurry.pytorch_losses import get_pytorch_loss\n        with pytest.raises(ValueError, match=\"Unknown standard loss\"):\n            get_pytorch_loss(\"huber\")\n\n    def test_unknown_custom_loss_raises(self):\n        from mhcflurry.pytorch_losses import get_pytorch_loss\n        with pytest.raises(ValueError, match=\"No such custom loss\"):\n            get_pytorch_loss(\"custom:nonexistent\")\n\n\nclass TestStandardLossWeighted:\n    def test_mse_with_sample_weights(self):\n        from mhcflurry.pytorch_losses import StandardLoss\n        loss = StandardLoss(\"mse\")\n        pred = torch.tensor([0.5, 0.3])\n        target = torch.tensor([0.5, 0.0])\n        weights = torch.tensor([0.0, 1.0])\n        val = loss(pred, target, sample_weights=weights).item()\n        # Only second sample contributes: 0.3^2 * 1.0 / 1.0\n        assert abs(val - 0.09) < 1e-6\n\n    def test_mae_with_sample_weights(self):\n        from mhcflurry.pytorch_losses import StandardLoss\n        loss = StandardLoss(\"mae\")\n        pred = torch.tensor([0.5, 0.3])\n        target = torch.tensor([0.5, 0.0])\n        weights = torch.tensor([0.0, 1.0])\n        val = loss(pred, target, sample_weights=weights).item()\n        assert abs(val - 0.3) < 1e-6\n\n    def 
test_mse_column_vector_predictions_do_not_warn(self):\n        from mhcflurry.pytorch_losses import StandardLoss\n        loss = StandardLoss(\"mse\")\n        pred = torch.tensor([[0.5], [0.3]])\n        target = torch.tensor([0.5, 0.0])\n        with warnings.catch_warnings(record=True) as caught:\n            warnings.simplefilter(\"always\")\n            val = loss(pred, target).item()\n        assert abs(val - 0.045) < 1e-6\n        assert not caught\n\n\nclass TestMSEWithInequalitiesSampleWeights:\n    def test_weighted_equality_loss(self):\n        from mhcflurry.pytorch_losses import MSEWithInequalities\n        loss_fn = MSEWithInequalities()\n        # Two equality targets, weight only the first\n        encoded = MSEWithInequalities.encode_y([0.5, 0.5], [\"=\", \"=\"])\n        y_pred = torch.tensor([0.7, 0.9])\n        y_true = torch.tensor(encoded)\n        weights = torch.tensor([1.0, 0.0])\n        val = loss_fn(y_pred, y_true, sample_weights=weights).item()\n        # Only first sample: (0.7 - 0.5)^2 * 1.0 / 1.0 = 0.04\n        assert abs(val - 0.04) < 1e-6\n\n    def test_encode_y_nan_raises(self):\n        from mhcflurry.pytorch_losses import MSEWithInequalities\n        with pytest.raises(ValueError, match=\"NaN\"):\n            MSEWithInequalities.encode_y([float(\"nan\")])\n\n    def test_encode_y_length_mismatch_raises(self):\n        from mhcflurry.pytorch_losses import MSEWithInequalities\n        with pytest.raises(ValueError, match=\"same length\"):\n            MSEWithInequalities.encode_y([0.5, 0.5], [\"=\"])\n\n\nclass TestMSEMultiOutputSampleWeights:\n    def test_weighted_multi_output(self):\n        from mhcflurry.pytorch_losses import MSEWithInequalitiesAndMultipleOutputs\n        loss_fn = MSEWithInequalitiesAndMultipleOutputs()\n        encoded = loss_fn.encode_y([0.5, 0.5], output_indices=[0, 1])\n        y_pred = torch.tensor([[0.7, 999.0], [999.0, 0.5]])\n        y_true = torch.tensor(encoded)\n        weights = torch.tensor([1.0, 0.0])\n        val = loss_fn(y_pred, y_true, sample_weights=weights).item()\n        # Only first sample, output 0: (0.7 - 0.5)^2 = 0.04\n        assert abs(val - 0.04) < 1e-6\n\n    def test_encode_y_negative_output_indices_raises(self):\n        from mhcflurry.pytorch_losses import MSEWithInequalitiesAndMultipleOutputs\n        with pytest.raises(ValueError, match=\"Invalid output indices\"):\n            MSEWithInequalitiesAndMultipleOutputs.encode_y(\n                [0.5], output_indices=[-1])\n\n    def test_encode_y_output_indices_shape_mismatch_raises(self):\n        from mhcflurry.pytorch_losses import MSEWithInequalitiesAndMultipleOutputs\n        with pytest.raises(ValueError, match=\"Expected output_indices\"):\n            MSEWithInequalitiesAndMultipleOutputs.encode_y(\n                [0.5, 0.5], output_indices=[0])\n\n\nclass TestMultiallelicMassSpecEdgeCases:\n    def test_no_hits_returns_zero(self):\n        from mhcflurry.pytorch_losses import MultiallelicMassSpecLoss\n        loss_fn = MultiallelicMassSpecLoss(delta=0.2)\n        y_pred = torch.tensor([[0.5, 0.3]], requires_grad=True)\n        y_true = torch.tensor([0.0])  # only decoys\n        val = loss_fn(y_pred, y_true)\n        assert val.item() == 0.0\n        val.backward()  # should not error\n\n    def test_no_decoys_returns_zero(self):\n        from mhcflurry.pytorch_losses import MultiallelicMassSpecLoss\n        loss_fn = MultiallelicMassSpecLoss(delta=0.2)\n        y_pred = torch.tensor([[0.5, 0.3]], requires_grad=True)\n        y_true = 
torch.tensor([1.0])  # only hits\n        val = loss_fn(y_pred, y_true)\n        assert val.item() == 0.0\n        val.backward()\n\n\n# ── pytorch_layers ──────────────────────────────────────────────────────────\n\n\nclass TestGetActivation:\n    def test_tanh(self):\n        from mhcflurry.pytorch_layers import get_activation\n        act = get_activation(\"tanh\")\n        x = torch.tensor([0.0])\n        assert act(x).item() == 0.0\n\n    def test_sigmoid(self):\n        from mhcflurry.pytorch_layers import get_activation\n        act = get_activation(\"sigmoid\")\n        assert abs(act(torch.tensor([0.0])).item() - 0.5) < 1e-6\n\n    def test_relu(self):\n        from mhcflurry.pytorch_layers import get_activation\n        act = get_activation(\"relu\")\n        assert act(torch.tensor([-1.0])).item() == 0.0\n        assert act(torch.tensor([1.0])).item() == 1.0\n\n    def test_linear_returns_none(self):\n        from mhcflurry.pytorch_layers import get_activation\n        assert get_activation(\"linear\") is None\n        assert get_activation(\"\") is None\n\n    def test_unknown_raises(self):\n        from mhcflurry.pytorch_layers import get_activation\n        with pytest.raises(ValueError, match=\"Unknown activation\"):\n            get_activation(\"swish\")\n\n\nclass TestLocallyConnected1D:\n    def test_output_shape(self):\n        from mhcflurry.pytorch_layers import LocallyConnected1D\n        lc = LocallyConnected1D(\n            in_channels=3, out_channels=5, input_length=10, kernel_size=3\n        )\n        x = torch.randn(2, 10, 3)\n        out = lc(x)\n        assert out.shape == (2, 8, 5)\n\n    def test_deterministic_forward(self):\n        from mhcflurry.pytorch_layers import LocallyConnected1D\n        torch.manual_seed(42)\n        lc = LocallyConnected1D(\n            in_channels=2, out_channels=1, input_length=4, kernel_size=2,\n            activation=\"linear\",\n        )\n        x = torch.ones(1, 4, 2)\n        out = lc(x)\n        # With linear activation, output = einsum + bias, should be deterministic\n        out2 = lc(x)\n        assert torch.allclose(out, out2)\n\n\n# ── ensemble_centrality ─────────────────────────────────────────────────────\n\n\nclass TestEnsembleCentralityEdgeCases:\n    def test_robust_mean_falls_back_to_nanmean_for_few_columns(self):\n        from mhcflurry.ensemble_centrality import robust_mean, _nanmean_no_warnings\n        arr = np.array([[1.0, 2.0, 3.0]])  # 3 columns => fallback\n        result = robust_mean(arr)\n        expected = _nanmean_no_warnings(arr)\n        np.testing.assert_array_equal(result, expected)\n\n    def test_robust_mean_two_columns(self):\n        from mhcflurry.ensemble_centrality import robust_mean\n        arr = np.array([[10.0, 20.0]])\n        result = robust_mean(arr)\n        assert result[0] == 15.0\n\n    def test_robust_mean_all_nan_many_columns(self):\n        from mhcflurry.ensemble_centrality import robust_mean\n        arr = np.array([[np.nan, np.nan, np.nan, np.nan, np.nan]])\n        result = robust_mean(arr)\n        assert np.isnan(result[0])\n\n    def test_nanmedian_mixed_nans(self):\n        from mhcflurry.ensemble_centrality import _nanmedian_no_warnings\n        arr = np.array([\n            [1.0, 3.0, np.nan],\n            [np.nan, np.nan, np.nan],\n            [5.0, 5.0, 5.0],\n        ])\n        result = _nanmedian_no_warnings(arr)\n        assert result[0] == 2.0\n        assert np.isnan(result[1])\n        assert result[2] == 5.0\n\n    def 
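test_robust_mean_constant_row_many_columns(self):\n        # Hedged check: with more than 3 columns robust_mean applies some\n        # trimming (the exact scheme is not asserted here), but any\n        # centrality measure should map a constant row to that constant.\n        from mhcflurry.ensemble_centrality import robust_mean\n        arr = np.array([[5.0, 5.0, 5.0, 5.0, 5.0]])\n        result = robust_mean(arr)\n        assert abs(result[0] - 5.0) < 1e-6\n\n    def 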
test_nanmean_single_value_per_row(self):\n        from mhcflurry.ensemble_centrality import _nanmean_no_warnings\n        arr = np.array([\n            [np.nan, 7.0, np.nan],\n        ])\n        result = _nanmean_no_warnings(arr)\n        assert result[0] == 7.0\n\n    def test_centrality_measures_dict(self):\n        from mhcflurry.ensemble_centrality import CENTRALITY_MEASURES\n        assert set(CENTRALITY_MEASURES.keys()) == {\"mean\", \"median\", \"robust_mean\"}\n\n\n# ── class1_neural_network: weight initialization ────────────────────────────\n\n\nclass TestWeightInitialization:\n    def _make_model(self, init):\n        from mhcflurry.class1_neural_network import (\n            Class1NeuralNetwork,\n            Class1NeuralNetworkModel,\n        )\n        nn_obj = Class1NeuralNetwork()\n        peptide_shape = nn_obj.peptides_to_network_input([]).shape[1:]\n        return Class1NeuralNetworkModel(\n            peptide_encoding_shape=peptide_shape,\n            layer_sizes=[16],\n            init=init,\n        )\n\n    def test_glorot_uniform(self):\n        model = self._make_model(\"glorot_uniform\")\n        assert model.output_layer.weight.shape[0] == 1\n\n    def test_glorot_normal(self):\n        model = self._make_model(\"glorot_normal\")\n        assert model.output_layer.weight.shape[0] == 1\n\n    def test_he_uniform(self):\n        model = self._make_model(\"he_uniform\")\n        assert model.output_layer.weight.shape[0] == 1\n\n    def test_he_normal(self):\n        model = self._make_model(\"he_normal\")\n        assert model.output_layer.weight.shape[0] == 1\n\n    def test_biases_are_zero(self):\n        model = self._make_model(\"glorot_uniform\")\n        for name, param in model.named_parameters():\n            if \"bias\" in name:\n                assert torch.all(param == 0), f\"Non-zero bias in {name}\"\n\n\n# ── class1_neural_network: MergedClass1NeuralNetwork ────────────────────────\n\n\nclass TestMergedClass1NeuralNetwork:\n    def _make_merged(self, merge_method, n_networks=2):\n        from mhcflurry.class1_neural_network import (\n            Class1NeuralNetwork,\n            Class1NeuralNetworkModel,\n            MergedClass1NeuralNetwork,\n        )\n        nn_obj = Class1NeuralNetwork()\n        peptide_shape = nn_obj.peptides_to_network_input([]).shape[1:]\n        torch.manual_seed(0)\n        networks = []\n        for _ in range(n_networks):\n            net = Class1NeuralNetworkModel(\n                peptide_encoding_shape=peptide_shape,\n                layer_sizes=[4],\n            )\n            networks.append(net)\n        return MergedClass1NeuralNetwork(networks, merge_method=merge_method)\n\n    def test_average(self):\n        merged = self._make_merged(\"average\")\n        inp = {\"peptide\": torch.randn(3, *merged.networks[0].peptide_encoding_shape)}\n        out = merged(inp)\n        # Average of 2 networks\n        individual = [net(inp) for net in merged.networks]\n        expected = torch.stack(individual, dim=-1).mean(dim=-1)\n        assert torch.allclose(out, expected, atol=1e-6)\n\n    def test_sum(self):\n        merged = self._make_merged(\"sum\")\n        inp = {\"peptide\": torch.randn(3, *merged.networks[0].peptide_encoding_shape)}\n        out = merged(inp)\n        individual = [net(inp) for net in merged.networks]\n        expected = torch.stack(individual, dim=-1).sum(dim=-1)\n        assert torch.allclose(out, expected, atol=1e-6)\n\n    def test_concatenate(self):\n        merged = 
self._make_merged(\"concatenate\")\n        inp = {\"peptide\": torch.randn(3, *merged.networks[0].peptide_encoding_shape)}\n        out = merged(inp)\n        individual = [net(inp) for net in merged.networks]\n        expected = torch.cat(individual, dim=-1)\n        assert torch.allclose(out, expected, atol=1e-6)\n\n    def test_unknown_merge_method_raises(self):\n        merged = self._make_merged(\"average\")\n        merged.merge_method = \"bad\"\n        inp = {\"peptide\": torch.randn(1, *merged.networks[0].peptide_encoding_shape)}\n        with pytest.raises(ValueError, match=\"Unknown merge method\"):\n            merged(inp)\n\n    def test_get_set_weights_roundtrip(self):\n        merged = self._make_merged(\"average\")\n        weights = merged.get_weights_list()\n        assert len(weights) > 0\n        # Setting the same weights back should not change outputs\n        inp = {\"peptide\": torch.randn(2, *merged.networks[0].peptide_encoding_shape)}\n        out_before = merged(inp).detach().clone()\n        merged.set_weights_list(weights)\n        out_after = merged(inp)\n        assert torch.allclose(out_before, out_after, atol=1e-6)\n\n\n# ── class1_neural_network: skip-connections topology ────────────────────────\n\n\nclass TestSkipConnectionsTopology:\n    def test_forward_pass(self):\n        from mhcflurry.class1_neural_network import (\n            Class1NeuralNetwork,\n            Class1NeuralNetworkModel,\n        )\n        nn_obj = Class1NeuralNetwork()\n        peptide_shape = nn_obj.peptides_to_network_input([]).shape[1:]\n        torch.manual_seed(7)\n        model = Class1NeuralNetworkModel(\n            peptide_encoding_shape=peptide_shape,\n            layer_sizes=[8, 8, 4],\n            topology=\"with-skip-connections\",\n        )\n        inp = {\"peptide\": torch.randn(2, *peptide_shape)}\n        out = model(inp)\n        assert out.shape == (2, 1)\n\n    def test_different_from_feedforward(self):\n        from mhcflurry.class1_neural_network import (\n            Class1NeuralNetwork,\n            Class1NeuralNetworkModel,\n        )\n        nn_obj = Class1NeuralNetwork()\n        peptide_shape = nn_obj.peptides_to_network_input([]).shape[1:]\n\n        torch.manual_seed(99)\n        skip_model = Class1NeuralNetworkModel(\n            peptide_encoding_shape=peptide_shape,\n            layer_sizes=[8, 8],\n            topology=\"with-skip-connections\",\n        )\n        torch.manual_seed(99)\n        ff_model = Class1NeuralNetworkModel(\n            peptide_encoding_shape=peptide_shape,\n            layer_sizes=[8, 8],\n            topology=\"feedforward\",\n        )\n        inp = {\"peptide\": torch.randn(2, *peptide_shape)}\n        out_skip = skip_model(inp)\n        out_ff = ff_model(inp)\n        # Different topologies should give different outputs (skip model has\n        # different second-layer input dim so weights differ)\n        assert not torch.allclose(out_skip, out_ff, atol=1e-4)\n\n\n# ── class1_affinity_predictor: canonicalize_allele_name ─────────────────────\n\n\nclass TestCanonicalizeAlleleName:\n    def test_common_alleles_roundtrip(self):\n        \"\"\"Known HLA alleles should round-trip through canonicalize_allele_name.\"\"\"\n        from mhcflurry.common import normalize_allele_name\n        alleles = [\n            \"HLA-A*02:01\", \"HLA-A*01:01\", \"HLA-B*07:02\",\n            \"HLA-B*44:02\", \"HLA-C*07:01\",\n        ]\n        for allele in alleles:\n            result = normalize_allele_name(allele, 
use_allele_aliases=False)\n            assert result == allele, f\"{allele} -> {result}\"\n\n    def test_aliases_false_avoids_remapping(self):\n        \"\"\"With aliases=False, HLA-C*01:01 should not remap to C*01:02.\"\"\"\n        from mhcflurry.common import normalize_allele_name\n        result = normalize_allele_name(\n            \"HLA-C*01:01\", use_allele_aliases=False)\n        assert result == \"HLA-C*01:01\"\n\n    def test_normalize_raises_on_invalid(self):\n        from mhcflurry.common import normalize_allele_name\n        with pytest.raises(ValueError, match=\"Invalid MHC allele name\"):\n            normalize_allele_name(\"INVALID_ALLELE_NAME\")\n\n    def test_normalize_returns_default_on_invalid(self):\n        from mhcflurry.common import normalize_allele_name\n        result = normalize_allele_name(\n            \"INVALID_ALLELE_NAME\", raise_on_error=False, default_value=\"NONE\")\n        assert result == \"NONE\"\n\n    def test_forbidden_substring_raises(self):\n        from mhcflurry.common import normalize_allele_name\n        with pytest.raises(ValueError, match=\"Unsupported gene\"):\n            normalize_allele_name(\"HLA-MIC-A\")\n\n    def test_forbidden_substring_returns_default(self):\n        from mhcflurry.common import normalize_allele_name\n        result = normalize_allele_name(\n            \"HLA-MIC-A\", raise_on_error=False, default_value=\"SKIP\")\n        assert result == \"SKIP\"\n\n\n# ── common.py: configure_pytorch ────────────────────────────────────────────\n\n\nclass TestConfigurePyTorch:\n    def test_reconfigure_backend(self):\n        from mhcflurry import common\n        old_backend = common._pytorch_backend\n        common.configure_pytorch(backend=\"cpu\")\n        assert common._pytorch_backend == \"cpu\"\n        common.configure_pytorch(backend=\"auto\")\n        assert common._pytorch_backend == \"auto\"\n        common._pytorch_backend = old_backend\n\n    def test_invalid_backend_raises(self):\n        from mhcflurry import common\n        with pytest.raises(ValueError, match=\"Invalid backend\"):\n            common.configure_pytorch(backend=\"gpuu\")\n\n    def test_default_backend_alias_maps_to_auto(self):\n        from mhcflurry import common\n        old_backend = common._pytorch_backend\n        try:\n            common.configure_pytorch(backend=\"default\")\n            assert common._pytorch_backend == \"auto\"\n        finally:\n            common._pytorch_backend = old_backend\n\n    def test_configure_tensorflow_cpu_backend_maps_to_cpu(self):\n        from mhcflurry import common\n        old_backend = common._pytorch_backend\n        try:\n            with pytest.warns(FutureWarning, match=\"configure_tensorflow\"):\n                common.configure_tensorflow(backend=\"tensorflow-cpu\")\n            assert common._pytorch_backend == \"cpu\"\n            assert str(common.get_pytorch_device()) == \"cpu\"\n        finally:\n            common._pytorch_backend = old_backend\n\n    def test_configure_tensorflow_default_alias_maps_to_auto(self):\n        from mhcflurry import common\n        old_backend = common._pytorch_backend\n        try:\n            common.configure_pytorch(backend=\"cpu\")\n            with pytest.warns(FutureWarning, match=\"configure_tensorflow\"):\n                common.configure_tensorflow(backend=\"tensorflow-default\")\n            assert common._pytorch_backend == \"auto\"\n        finally:\n            common._pytorch_backend = old_backend\n\n    def 
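test_configure_pytorch_cpu_device(self):\n        # Companion to the tensorflow-compat tests above, using the native\n        # API: with the \"cpu\" backend, get_pytorch_device should report cpu.\n        from mhcflurry import common\n        old_backend = common._pytorch_backend\n        try:\n            common.configure_pytorch(backend=\"cpu\")\n            assert str(common.get_pytorch_device()) == \"cpu\"\n        finally:\n            common._pytorch_backend = old_backend\n\n    def 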
test_configure_tensorflow_gpu_backend_maps_to_gpu(self):\n        from mhcflurry import common\n        old_backend = common._pytorch_backend\n        try:\n            with pytest.warns(FutureWarning, match=\"configure_tensorflow\"):\n                common.configure_tensorflow(backend=\"tensorflow-gpu\")\n            assert common._pytorch_backend == \"gpu\"\n            if not torch.cuda.is_available():\n                with pytest.raises(RuntimeError, match=\"CUDA is not available\"):\n                    common.get_pytorch_device()\n        finally:\n            common._pytorch_backend = old_backend\n"
  },
  {
    "path": "test/test_pytorch_regressions.py",
    "content": "\"\"\"\nRegression tests for PyTorch conversion gaps vs master behavior.\n\"\"\"\nimport json\nimport os\nimport random\n\nimport pytest\n\nimport numpy as np\nimport torch\n\nfrom mhcflurry.class1_neural_network import (\n    Class1NeuralNetwork,\n    Class1NeuralNetworkModel,\n    MergedClass1NeuralNetwork,\n)\nfrom mhcflurry.class1_processing_neural_network import (\n    Class1ProcessingModel,\n    Class1ProcessingNeuralNetwork,\n)\nfrom mhcflurry.common import load_weights\nfrom mhcflurry.flanking_encoding import FlankingEncoding\nfrom mhcflurry.pytorch_losses import (\n    MSEWithInequalities,\n    MultiallelicMassSpecLoss,\n)\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    startup()\n    yield\n    cleanup()\n\n\ndef _make_simple_affinity_model(**overrides):\n    hyperparameters = dict(\n        activation=\"tanh\",\n        layer_sizes=[4],\n        locally_connected_layers=[],\n        peptide_dense_layer_sizes=[],\n        allele_dense_layer_sizes=[],\n        dropout_probability=0.0,\n        batch_normalization=False,\n        dense_layer_l1_regularization=0.0,\n        dense_layer_l2_regularization=0.0,\n        max_epochs=5,\n        early_stopping=False,\n        validation_split=0.0,\n        minibatch_size=2,\n        optimizer=\"sgd\",\n        learning_rate=0.1,\n        random_negative_rate=0.0,\n        random_negative_constant=0,\n    )\n    hyperparameters.update(overrides)\n    return Class1NeuralNetwork(**hyperparameters)\n\n\ndef _make_allele_representations(num_alleles=2):\n    return np.arange(num_alleles * 6, dtype=np.float32).reshape(num_alleles, 2, 3)\n\n\ndef _seed_all(seed=1):\n    np.random.seed(seed)\n    random.seed(seed)\n    torch.manual_seed(seed)\n\n\ndef test_sample_weights_affect_training():\n    peptides = [\"AAAAAAAAA\", \"CCCCCCCCC\"]\n    affinities = np.array([50.0, 50000.0])\n    weights = np.array([1000.0, 1.0])\n\n    _seed_all(7)\n    model_unweighted = _make_simple_affinity_model(max_epochs=25)\n    model_unweighted.fit(\n        peptides,\n        affinities,\n        shuffle_permutation=[0, 1],\n    )\n    pred_unweighted = model_unweighted.predict(peptides)\n\n    _seed_all(7)\n    model_weighted = _make_simple_affinity_model(max_epochs=25)\n    model_weighted.fit(\n        peptides,\n        affinities,\n        sample_weights=weights,\n        shuffle_permutation=[0, 1],\n    )\n    pred_weighted = model_weighted.predict(peptides)\n\n    # With sample weights, training should diverge from the unweighted case.\n    assert not np.allclose(pred_unweighted, pred_weighted, rtol=0.01, atol=0.0)\n\n\ndef test_validation_split_is_fixed_when_lr_zero():\n    peptides = [\"AAAAAAAAA\", \"CCCCCCCCC\", \"DDDDDDDDD\", \"EEEEEEEEE\"]\n    affinities = np.array([50.0, 50000.0, 50000.0, 50.0])\n\n    _seed_all(3)\n    model = _make_simple_affinity_model(\n        learning_rate=0.0,\n        max_epochs=3,\n        validation_split=0.5,\n        early_stopping=False,\n    )\n    model.fit(\n        peptides,\n        affinities,\n        shuffle_permutation=[0, 1, 2, 3],\n    )\n    val_losses = model.fit_info[-1][\"val_loss\"]\n    assert len(val_losses) >= 2\n    # With fixed validation split and zero learning rate, val loss should be constant.\n    assert np.allclose(val_losses, val_losses[0], rtol=0.0, atol=1e-6)\n\n\ndef test_dropout_probability_is_keep_prob():\n    nn = Class1NeuralNetwork()\n    peptide_shape = nn.peptides_to_network_input([]).shape[1:]\n    
model = Class1NeuralNetworkModel(\n        peptide_encoding_shape=peptide_shape,\n        dropout_probability=0.8,\n    )\n    assert model.dropouts[0] is not None\n    # In master, dropout_probability is a keep probability, so p should be 0.2.\n    assert model.dropouts[0].p == pytest.approx(0.2, abs=1e-6)\n\n\ndef test_batch_norm_uses_keras_defaults():\n    nn = Class1NeuralNetwork()\n    peptide_shape = nn.peptides_to_network_input([]).shape[1:]\n    model = Class1NeuralNetworkModel(\n        peptide_encoding_shape=peptide_shape,\n        batch_normalization=True,\n        layer_sizes=[4],\n    )\n\n    assert model.batch_norm_early is not None\n    assert model.batch_norm_early.eps == pytest.approx(1e-3, abs=1e-12)\n    assert model.batch_norm_early.momentum == pytest.approx(0.01, abs=1e-12)\n    assert model.batch_norms[0] is not None\n    assert model.batch_norms[0].eps == pytest.approx(1e-3, abs=1e-12)\n    assert model.batch_norms[0].momentum == pytest.approx(0.01, abs=1e-12)\n\n\ndef test_processing_dropout_is_spatial():\n    model = Class1ProcessingModel(\n        sequence_dims=(10, 3),\n        n_flank_length=1,\n        c_flank_length=1,\n        peptide_max_length=8,\n        flanking_averages=False,\n        convolutional_filters=3,\n        convolutional_kernel_size=1,\n        convolutional_activation=\"tanh\",\n        convolutional_kernel_l1_l2=[0.0, 0.0],\n        dropout_rate=0.5,\n        post_convolutional_dense_layer_sizes=[],\n    )\n    assert model.dropout is not None\n    model.train()\n    _seed_all(11)\n\n    x = torch.ones((1, 3, 10))\n    dropped = model.dropout(x)\n    mask = (dropped != 0)\n\n    # Spatial dropout should use one mask per channel across all positions.\n    for c in range(mask.shape[1]):\n        assert torch.all(mask[0, c, :] == mask[0, c, 0])\n\n\ndef test_processing_flank_averages_use_tf_masked_mean_semantics():\n    model = Class1ProcessingModel(\n        sequence_dims=(7, 1),\n        n_flank_length=2,\n        c_flank_length=2,\n        peptide_max_length=3,\n        flanking_averages=True,\n        convolutional_filters=1,\n        convolutional_kernel_size=1,\n        convolutional_activation=\"tanh\",\n        convolutional_kernel_l1_l2=[0.0, 0.0],\n        dropout_rate=0.0,\n        post_convolutional_dense_layer_sizes=[],\n    )\n\n    # With TF semantics, masked averaging is computed via:\n    # mean((x + 1) * mask, axis=sequence_axis) - 1\n    # i.e., denominator is full sequence length, not number of masked positions.\n    conv_result = torch.ones((1, 7, 1))\n    peptide_length = torch.tensor([[3]])\n\n    n_avg = model._extract_n_flank_avg(conv_result)\n    c_avg = model._extract_c_flank_avg(conv_result, peptide_length)\n\n    expected = (2 * (1.0 + 1.0) / 7.0) - 1.0\n    assert n_avg.item() == pytest.approx(expected, abs=1e-7)\n    assert c_avg.item() == pytest.approx(expected, abs=1e-7)\n\n\ndef test_mse_with_inequalities_rejects_out_of_range_targets():\n    with pytest.raises(ValueError):\n        MSEWithInequalities.encode_y([1.1], inequalities=[\"=\"])\n    with pytest.raises(ValueError):\n        MSEWithInequalities.encode_y([-0.1], inequalities=[\"=\"])\n\n\ndef test_mse_with_inequalities_rejects_invalid_inequality():\n    with pytest.raises(ValueError):\n        MSEWithInequalities.encode_y([0.5], inequalities=[\"?\"])\n\n\ndef test_multiallelic_mass_spec_encode_y_validates_values():\n    with pytest.raises(AssertionError):\n        MultiallelicMassSpecLoss.encode_y([0.5])\n\n\ndef 
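test_mse_with_inequalities_zero_loss_on_exact_match():\n    # Hedged sketch: encode_y with \"=\" inequalities should leave the targets\n    # recoverable, so an exactly-matching prediction incurs ~zero loss.\n    loss_fn = MSEWithInequalities()\n    encoded = MSEWithInequalities.encode_y([0.2, 0.8], inequalities=[\"=\", \"=\"])\n    y_pred = torch.tensor([0.2, 0.8])\n    y_true = torch.tensor(encoded)\n    assert loss_fn(y_pred, y_true).item() == pytest.approx(0.0, abs=1e-6)\n\n\ndef 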
test_merge_allele_specific_raises_not_implemented():\n    _seed_all(5)\n    model_a = _make_simple_affinity_model(max_epochs=1)\n    model_b = _make_simple_affinity_model(max_epochs=1)\n\n    # Ensure networks exist, matching master expectations for merge().\n    model_a._network = model_a.make_network(\n        allele_representations=None,\n        **model_a.network_hyperparameter_defaults.subselect(model_a.hyperparameters)\n    )\n    model_b._network = model_b.make_network(\n        allele_representations=None,\n        **model_b.network_hyperparameter_defaults.subselect(model_b.hyperparameters)\n    )\n    with pytest.raises(NotImplementedError):\n        Class1NeuralNetwork.merge([model_a, model_b])\n\n\ndef test_merged_network_serialization_preserves_dropout_keep_probability():\n    _seed_all(23)\n    allele_representations = np.zeros((2, 3, 4), dtype=np.float32)\n\n    models = []\n    for _ in range(2):\n        model = Class1NeuralNetwork(\n            dropout_probability=0.8,\n            layer_sizes=[4],\n            allele_dense_layer_sizes=[],\n            peptide_dense_layer_sizes=[],\n            locally_connected_layers=[],\n            batch_normalization=False,\n            dense_layer_l1_regularization=0.0,\n            dense_layer_l2_regularization=0.0,\n        )\n        model._network = model.make_network(\n            allele_representations=allele_representations,\n            **model.network_hyperparameter_defaults.subselect(model.hyperparameters)\n        )\n        models.append(model)\n\n    merged = Class1NeuralNetwork.merge(models)\n    config = merged.get_config()\n    roundtripped = Class1NeuralNetwork.from_config(\n        config,\n        weights=merged.get_weights(),\n    )\n    network = roundtripped.network()\n\n    assert isinstance(network, MergedClass1NeuralNetwork)\n    for subnet in network.networks:\n        assert subnet.dropout_probability == pytest.approx(0.8, abs=1e-12)\n        assert subnet.dropouts[0] is not None\n        assert subnet.dropouts[0].p == pytest.approx(0.2, abs=1e-12)\n\n\ndef test_dense_regularization_excludes_output_layer():\n    peptides = [\"AAAAAAAAA\", \"CCCCCCCCC\"]\n\n    _seed_all(17)\n    model = _make_simple_affinity_model(\n        layer_sizes=[],\n        dense_layer_l1_regularization=0.1,\n        dense_layer_l2_regularization=0.2,\n        max_epochs=1,\n        validation_split=0.0,\n        early_stopping=False,\n    )\n\n    model._network = model.make_network(\n        allele_representations=None,\n        **model.network_hyperparameter_defaults.subselect(model.hyperparameters)\n    )\n\n    affinities = model.predict(peptides)\n    weights_before = [p.detach().cpu().clone() for p in model.network().parameters()]\n\n    model.fit(\n        peptides,\n        affinities,\n        shuffle_permutation=[0, 1],\n    )\n\n    weights_after = [p.detach().cpu().clone() for p in model.network().parameters()]\n    for before, after in zip(weights_before, weights_after):\n        assert torch.allclose(before, after, rtol=0.0, atol=1e-7)\n\n\ndef test_processing_validation_uses_last_fraction_and_sample_weights():\n    _seed_all(19)\n    model = Class1ProcessingNeuralNetwork(\n        max_epochs=1,\n        validation_split=0.5,\n        early_stopping=False,\n        learning_rate=0.0,\n        minibatch_size=2,\n        dropout_rate=0.0,\n        flanking_averages=False,\n        convolutional_kernel_l1_l2=[0.0, 0.0],\n        convolutional_filters=2,\n        convolutional_kernel_size=1,\n        n_flank_length=1,\n   
     c_flank_length=1,\n        peptide_max_length=8,\n    )\n\n    sequences = FlankingEncoding(\n        peptides=[\"AAAAAAAA\", \"CCCCCCCC\", \"DDDDDDDD\", \"EEEEEEEE\"],\n        n_flanks=[\"Q\", \"R\", \"S\", \"T\"],\n        c_flanks=[\"V\", \"W\", \"Y\", \"A\"],\n    )\n    targets = np.array([1.0, 0.0, 1.0, 0.0], dtype=np.float32)\n    sample_weights = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float32)\n    shuffle_permutation = np.array([2, 0, 3, 1])\n\n    model._network = model.make_network(\n        **model.network_hyperparameter_defaults.subselect(model.hyperparameters)\n    )\n    network = model.network()\n    network.eval()\n\n    x_dict = model.network_input(sequences)\n    x_dict = {\n        key: value[shuffle_permutation]\n        for key, value in x_dict.items()\n    }\n    shuffled_targets = targets[shuffle_permutation]\n    shuffled_weights = sample_weights[shuffle_permutation]\n\n    val_indices = np.arange(len(targets))[2:]\n    with torch.no_grad():\n        val_inputs = {\n            \"sequence\": torch.from_numpy(x_dict[\"sequence\"][val_indices]).float(),\n            \"peptide_length\": torch.from_numpy(x_dict[\"peptide_length\"][val_indices]),\n        }\n        predictions = network(val_inputs)\n        expected = torch.nn.functional.binary_cross_entropy(\n            predictions,\n            torch.from_numpy(shuffled_targets[val_indices]),\n            reduction=\"none\",\n        )\n        expected = (\n            expected *\n            torch.from_numpy(shuffled_weights[val_indices])\n        ).mean().item()\n\n    model.fit(\n        sequences=sequences,\n        targets=targets,\n        sample_weights=sample_weights,\n        shuffle_permutation=shuffle_permutation,\n        verbose=0,\n    )\n\n    assert model.fit_info[-1][\"val_loss\"][0] == pytest.approx(expected, abs=1e-7)\n\n\ndef test_optimizer_defaults_match_keras():\n    affinity_model = _make_simple_affinity_model(optimizer=\"adam\")\n    affinity_model._network = affinity_model.make_network(\n        allele_representations=None,\n        **affinity_model.network_hyperparameter_defaults.subselect(\n            affinity_model.hyperparameters\n        )\n    )\n    affinity_optimizer = affinity_model._create_optimizer(affinity_model.network())\n    assert affinity_optimizer.defaults[\"eps\"] == pytest.approx(1e-07, abs=1e-12)\n\n    processing_model = Class1ProcessingNeuralNetwork(\n        optimizer=\"rmsprop\",\n        learning_rate=0.001,\n    )\n    processing_model._network = processing_model.make_network(\n        **processing_model.network_hyperparameter_defaults.subselect(\n            processing_model.hyperparameters\n        )\n    )\n    processing_optimizer = processing_model._create_optimizer(\n        processing_model.network()\n    )\n    assert processing_optimizer.defaults[\"alpha\"] == pytest.approx(0.9, abs=1e-12)\n    assert processing_optimizer.defaults[\"eps\"] == pytest.approx(1e-07, abs=1e-12)\n\n\ndef test_weight_and_embedding_updates_preserve_device():\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n    affinity_model = _make_simple_affinity_model(batch_normalization=True)\n    affinity_model._network = affinity_model.make_network(\n        allele_representations=_make_allele_representations(),\n        **affinity_model.network_hyperparameter_defaults.subselect(\n            affinity_model.hyperparameters\n        )\n    )\n    affinity_network = affinity_model.network()\n    affinity_network.to(device)\n\n    
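# Round-trip the weights through get/set; parameters and buffers must\n    # remain on the device the network was moved to.\n    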
affinity_network.set_weights_list(\n        affinity_network.get_weights_list(),\n        auto_convert_keras=False,\n    )\n    assert all(param.device == device for param in affinity_network.parameters())\n    assert all(buffer.device == device for buffer in affinity_network.buffers())\n\n    affinity_model.set_allele_representations(_make_allele_representations(3))\n    assert affinity_network.allele_embedding.weight.device == device\n\n    affinity_model.clear_allele_representations()\n    assert affinity_network.allele_embedding.weight.device == device\n\n    processing_model = Class1ProcessingNeuralNetwork(\n        dropout_rate=0.0,\n        flanking_averages=False,\n        convolutional_kernel_l1_l2=[0.0, 0.0],\n        convolutional_filters=2,\n        convolutional_kernel_size=1,\n        n_flank_length=1,\n        c_flank_length=1,\n        peptide_max_length=8,\n    )\n    processing_model._network = processing_model.make_network(\n        **processing_model.network_hyperparameter_defaults.subselect(\n            processing_model.hyperparameters\n        )\n    )\n    processing_network = processing_model.network()\n    processing_network.to(device)\n\n    processing_network.set_weights_list(\n        processing_network.get_weights_list(),\n        auto_convert_keras=False,\n    )\n    assert all(param.device == device for param in processing_network.parameters())\n    assert all(buffer.device == device for buffer in processing_network.buffers())\n\n\ndef test_cached_keras_weight_reload_preserves_device():\n    data_dir = os.path.join(os.path.dirname(__file__), \"data\")\n    config_path = os.path.join(data_dir, \"master_affinity_fixture_config.json\")\n    weights_path = os.path.join(data_dir, \"master_affinity_fixture_weights.npz\")\n\n    with open(config_path, \"r\") as inp:\n        config = json.load(inp)\n\n    weights = load_weights(weights_path)\n    reloaded_weights = [w.copy() for w in weights]\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n    Class1NeuralNetwork.clear_model_cache()\n\n    first_model = Class1NeuralNetwork.from_config(config, weights=weights)\n    first_network = first_model.network(borrow=True)\n    first_network.to(device)\n\n    second_model = Class1NeuralNetwork.from_config(config, weights=reloaded_weights)\n    second_network = second_model.network(borrow=True)\n\n    assert second_network is first_network\n    assert all(param.device == device for param in second_network.parameters())\n    assert all(buffer.device == device for buffer in second_network.buffers())\n\n\ndef test_l1_regularization_changes_weights_even_with_zero_data_loss():\n    peptides = [\"AAAAAAAAA\", \"CCCCCCCCC\"]\n\n    _seed_all(13)\n    model = _make_simple_affinity_model(\n        dense_layer_l1_regularization=0.1,\n        dense_layer_l2_regularization=0.0,\n        max_epochs=1,\n        validation_split=0.0,\n        early_stopping=False,\n    )\n\n    # Build network explicitly so we can read weights before fitting.\n    model._network = model.make_network(\n        allele_representations=None,\n        **model.network_hyperparameter_defaults.subselect(model.hyperparameters)\n    )\n\n    affinities = model.predict(peptides)\n    weights_before = [p.detach().cpu().clone() for p in model.network().parameters()]\n\n    model.fit(\n        peptides,\n        affinities,\n        shuffle_permutation=[0, 1],\n    )\n\n    weights_after = [p.detach().cpu().clone() for p in model.network().parameters()]\n    changed = any(\n        not 
torch.allclose(before, after, rtol=0.0, atol=1e-7)\n        for before, after in zip(weights_before, weights_after)\n    )\n    assert changed\n"
  },
  {
    "path": "test/test_random_negative_peptides.py",
    "content": "\"\"\"Tests for random negative peptide generation.\"\"\"\n\nimport pandas\nimport math\n\nfrom mhcflurry.common import random_peptides\nfrom mhcflurry.random_negative_peptides import RandomNegativePeptides\n\n\ndef test_random_negative_peptides_by_allele_equalize_nonbinders():\n    planner = RandomNegativePeptides(\n        random_negative_method=\"by_allele\",\n        random_negative_binder_threshold=500,\n        random_negative_rate=1.0,\n        random_negative_constant=2)\n\n    data_rows = [\n        (\"HLA-A*02:01\", \"SIINFEKL\", 400, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLL\", 300, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLL\", 300, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLQ\", 1000, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLZZ\", 12000, \">\"),\n    ]\n    for peptide in random_peptides(1000, length=9):\n        data_rows.append((\"HLA-B*44:02\", peptide, 100, \"=\"))\n    for peptide in random_peptides(1000, length=9):\n        data_rows.append((\"HLA-B*44:02\", peptide, 1000, \"=\"))\n    for peptide in random_peptides(5, length=10):\n        data_rows.append((\"HLA-B*44:02\", peptide, 100, \"=\"))\n\n    data = pandas.DataFrame(\n        data_rows,\n        columns=[\"allele\", \"peptide\", \"affinity\", \"inequality\"])\n    data[\"length\"] = data.peptide.str.len()\n\n    planner.plan(\n        peptides=data.peptide.values,\n        affinities=data.affinity.values,\n        alleles=data.allele.values,\n        inequalities=data.inequality.values)\n    result_df = pandas.DataFrame({\n        \"allele\": planner.get_alleles(),\n        \"peptide\": planner.get_peptides(),\n    })\n\n    result_df[\"length\"] = result_df.peptide.str.len()\n    random_negatives = result_df.groupby([\"allele\", \"length\"]).peptide.count().unstack()\n    data.groupby([\"allele\", \"length\"]).peptide.count().unstack().fillna(0)\n    data.loc[\n        data.affinity <= 500\n    ].groupby([\"allele\", \"length\"]).peptide.count().unstack().fillna(0)\n    real_nonbinders = data.loc[\n        data.affinity > 500\n    ].groupby([\"allele\", \"length\"]).peptide.count().unstack().fillna(0)\n    random_negatives + real_nonbinders\n\n    assert (random_negatives.loc[\"HLA-A*02:01\"] == 1.0).all()\n    assert (random_negatives.loc[\"HLA-B*44:02\"] == math.ceil(1007 / 8)).all(), (\n        random_negatives.loc[\"HLA-B*44:02\"], math.ceil(1007 / 8))\n\n\n\ndef test_random_negative_peptides_by_allele():\n    planner = RandomNegativePeptides(\n        random_negative_method=\"by_allele_equalize_nonbinders\",\n        random_negative_binder_threshold=500,\n        random_negative_rate=1.0,\n        random_negative_constant=2)\n    data_rows = [\n        (\"HLA-A*02:01\", \"SIINFEKL\", 400, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLL\", 300, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLL\", 300, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLQ\", 1000, \"=\"),\n        (\"HLA-A*02:01\", \"SIINFEKLZZ\", 12000, \">\"),\n        (\"HLA-C*01:02\", \"SIINFEKLQ\", 100, \"=\"),  # only binders\n        (\"HLA-C*07:02\", \"SIINFEKLL\", 1000, \"=\")   # only non-binders\n\n    ]\n    for peptide in random_peptides(1000, length=9):\n        data_rows.append((\"HLA-B*44:02\", peptide, 100, \"=\"))\n    for peptide in random_peptides(1000, length=9):\n        data_rows.append((\"HLA-B*44:02\", peptide, 1000, \"=\"))\n    for peptide in random_peptides(5, length=10):\n        data_rows.append((\"HLA-B*44:02\", peptide, 100, \"=\"))\n\n    data = pandas.DataFrame(\n        data_rows,\n      
  columns=[\"allele\", \"peptide\", \"affinity\", \"inequality\"])\n    data[\"length\"] = data.peptide.str.len()\n\n    planner.plan(\n        peptides=data.peptide.values,\n        affinities=data.affinity.values,\n        alleles=data.allele.values,\n        inequalities=data.inequality.values)\n    result_df = pandas.DataFrame({\n        \"allele\": planner.get_alleles(),\n        \"peptide\": planner.get_peptides(),\n    })\n    result_df[\"length\"] = result_df.peptide.str.len()\n    random_negatives = result_df.groupby([\"allele\", \"length\"]).peptide.count().unstack()\n    real_data = data.groupby([\"allele\", \"length\"]).peptide.count().unstack().fillna(0)\n    data.loc[\n        data.affinity <= 500\n    ].groupby([\"allele\", \"length\"]).peptide.count().unstack().fillna(0)\n    real_nonbinders = data.loc[\n        data.affinity > 500\n    ].groupby([\"allele\", \"length\"]).peptide.count().unstack().fillna(0)\n    for length in random_negatives.columns:\n        if length not in real_nonbinders.columns:\n            real_nonbinders[length] = 0\n    total_nonbinders = (\n            random_negatives.reindex(real_data.index).fillna(0) +\n            real_nonbinders.reindex(real_data.index).fillna(0))\n\n    assert (total_nonbinders.loc[\"HLA-A*02:01\"] == 2.0).all(), total_nonbinders\n    assert (total_nonbinders.loc[\"HLA-B*44:02\"] == 1126).all(), total_nonbinders\n\n    assert not total_nonbinders.isnull().any().any()\n"
  },
  {
    "path": "test/test_regression_target.py",
    "content": "\"\"\"Tests for regression target conversion.\"\"\"\n\nfrom mhcflurry.regression_target import (\n    from_ic50,\n    to_ic50,\n)\n\n\ndef test_regression_target_to_ic50():\n    assert to_ic50(0, max_ic50=500.0) == 500\n    assert to_ic50(1, max_ic50=500.0) == 1.0\n\n\ndef test_ic50_to_regression_target():\n    assert from_ic50(5000, max_ic50=5000.0) == 0\n    assert from_ic50(0, max_ic50=5000.0) == 1.0\n"
  },
  {
    "path": "test/test_released_master_predictions.py",
    "content": "\"\"\"\nRegression tests for released model predictions.\n\nExpected values were generated from the TF/Keras implementation on the\npublished model weights and are stored under test/data/.  These tests\nverify that the current (PyTorch) code reproduces the same predictions.\n\"\"\"\nimport json\nimport os\n\nimport numpy as np\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\ndef setup_module():\n    startup()\n\n\ndef teardown_module():\n    cleanup()\n\n\ndef _load_expected(name):\n    data_dir = os.path.join(os.path.dirname(__file__), \"data\")\n    with open(os.path.join(data_dir, name), \"r\") as f:\n        return json.load(f)\n\n\ndef test_allele_specific_affinity_predictions():\n    expected = _load_expected(\n        \"master_released_class1_affinity_predictions.json\")[\"allele_specific\"]\n\n    predictor = Class1AffinityPredictor.load(\n        get_path(\"models_class1\", \"models\"))\n\n    predictions = predictor.predict(\n        peptides=expected[\"peptides\"],\n        alleles=expected[\"alleles\"],\n    )\n\n    np.testing.assert_allclose(\n        predictions,\n        np.array(expected[\"predictions\"], dtype=np.float64),\n        rtol=0.01,\n        atol=0.0,\n    )\n\n\ndef test_pan_allele_affinity_predictions():\n    expected = _load_expected(\n        \"master_released_class1_affinity_predictions.json\")[\"pan_allele\"]\n\n    predictor = Class1AffinityPredictor.load(\n        get_path(\"models_class1_pan\", \"models.combined\"))\n\n    predictions = predictor.predict(\n        peptides=expected[\"peptides\"],\n        alleles=expected[\"alleles\"],\n    )\n\n    np.testing.assert_allclose(\n        predictions,\n        np.array(expected[\"predictions\"], dtype=np.float64),\n        rtol=0.01,\n        atol=0.0,\n    )\n"
  },
  {
    "path": "test/test_released_predictors_on_hpv_dataset.py",
    "content": "\"\"\"\nTest accuracy on HPV benchmark used in MHCflurry Cell Systems 2018 paper.\n\nThe study that generated this dataset has now been published\n(Bonsack et al 2019, DOI: 10.1158/2326-6066.CIR-18-0584), and the authors\nrequest that any work based on the HPV dataset cite this paper.\n\"\"\"\n\nimport os\nimport pandas\nimport pytest\nfrom sklearn.metrics import roc_auc_score\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.downloads import get_path\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\ndef data_path(name):\n    '''\n    Return the absolute path to a file in the test/data directory.\n    The name specified should be relative to test/data.\n    '''\n    return os.path.join(os.path.dirname(__file__), \"data\", name)\n\n\nDF = pandas.read_csv(data_path(\"hpv_predictions.csv\"))\n\n\ndef setup_module():\n    global PREDICTORS\n    startup()\n    PREDICTORS = {\n        'allele-specific': Class1AffinityPredictor.load(\n            get_path(\"models_class1\", \"models\")),\n        'pan-allele': Class1AffinityPredictor.load(\n            get_path(\"models_class1_pan\", \"models.combined\"))\n    }\n\n\ndef teardown_module():\n    global PREDICTORS\n    PREDICTORS = None\n    cleanup()\n\n\n@pytest.fixture(scope=\"module\")\ndef predictors():\n    return PREDICTORS\n\n\ndef test_on_hpv(predictors, df=DF):\n    scores_df = []\n    for (name, predictor) in predictors.items():\n        print(\"Running\", name)\n        df[name] = predictor.predict(df.peptide, alleles=df.allele)\n\n    for name in df.columns[8:]:\n        for nm_cutoff in [2000, 5000, 50000]:\n            labels = df[\"affinity\"] < nm_cutoff\n            auc = roc_auc_score(labels.values, -1 * df[name].values)\n            scores_df.append((name, \"auc-%dnM\" % nm_cutoff, auc))\n    scores_df = pandas.DataFrame(\n        scores_df,\n        columns=[\"predictor\", \"metric\", \"score\"])\n    scores_df = scores_df.pivot(\n        index=\"metric\", columns=\"predictor\", values=\"score\")\n\n    print(\"Predictions\")\n    print(df)\n\n    print(\"Scores\")\n    print(scores_df)\n\n    mean_scores = scores_df.mean()\n    assert mean_scores[\"allele-specific\"] > mean_scores[\"netmhcpan4\"]\n    assert mean_scores[\"pan-allele\"] > mean_scores[\"netmhcpan4\"]\n"
  },
  {
    "path": "test/test_released_predictors_well_correlated.py",
    "content": "\"\"\"\nTest that pan-allele and allele-specific predictors are highly correlated.\n\"\"\"\nfrom __future__ import print_function\nimport argparse\nimport logging\nimport sys\n\nimport numpy\nimport pandas\nimport pytest\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.encodable_sequences import EncodableSequences\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.common import random_peptides\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\nlogger = logging.getLogger(\"matplotlib\")\nlogger.disabled = True\n\ndef setup():\n    \"\"\"Setup for running script directly (not via pytest).\"\"\"\n    global PREDICTORS\n    startup()\n    PREDICTORS = {\n        'allele-specific': Class1AffinityPredictor.load(get_path(\"models_class1\", \"models\")),\n        'pan-allele': Class1AffinityPredictor.load(get_path(\"models_class1_pan\", \"models.combined\")),\n    }\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    \"\"\"Setup and teardown for each test.\"\"\"\n    global PREDICTORS\n    startup()\n    try:\n        PREDICTORS = {\n            'allele-specific': Class1AffinityPredictor.load(\n                get_path(\"models_class1\", \"models\")),\n            'pan-allele': Class1AffinityPredictor.load(\n                get_path(\"models_class1_pan\", \"models.combined\"), max_models=2)\n        }\n    except Exception:\n        PREDICTORS = None\n    yield\n    PREDICTORS = None\n    cleanup()\n\n\n@pytest.fixture\ndef predictors():\n    return PREDICTORS\n\n\ndef test_correlation(\n        predictors,\n        alleles=None,\n        num_peptides_per_length=1000,\n        lengths=[8, 9, 10],\n        debug=False,\n        return_result=False):\n    peptides = []\n    for length in lengths:\n        peptides.extend(random_peptides(num_peptides_per_length, length))\n\n    # Cache encodings\n    peptides = EncodableSequences.create(list(set(peptides)))\n\n    if alleles is None:\n        alleles = set.intersection(*[\n            set(predictor.supported_alleles) for predictor in predictors.values()\n        ])\n    alleles = sorted(set(alleles))\n    df = pandas.DataFrame(index=peptides.sequences)\n\n    results_df = []\n    for allele in alleles:\n        for (name, predictor) in predictors.items():\n            df[name] = predictor.predict(peptides, allele=allele)\n        correlation = numpy.corrcoef(\n            numpy.log10(df[\"allele-specific\"]),\n            numpy.log10(df[\"pan-allele\"]))[0, 1]\n        results_df.append((allele, correlation))\n        print(len(results_df), len(alleles), *results_df[-1])\n\n        if correlation < 0.6:\n            print(\"Warning: low correlation\", allele)\n            df[\"tightest\"] = df.min(axis=1)\n            print(df.sort_values(\"tightest\").iloc[:, :-1])\n            if debug:\n                import ipdb  # pylint: disable=import-error\n                ipdb.set_trace()\n            del df[\"tightest\"]\n\n    results_df = pandas.DataFrame(results_df, columns=[\"allele\", \"correlation\"])\n    print(results_df)\n\n    print(\"Mean correlation\", results_df.correlation.mean())\n    assert results_df.correlation.mean() > 0.65\n\n    if return_result:\n        return results_df\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\nparser.add_argument(\n    \"--alleles\",\n    nargs=\"+\",\n    default=None,\n    help=\"Which alleles to test\")\n\nif __name__ == '__main__':\n    # If run directly from python, leave the user in a shell to explore results.\n    startup()\n    args = 
parser.parse_args(sys.argv[1:])\n    setup()  # populate the module-level PREDICTORS used below\n    result = test_correlation(\n        PREDICTORS, alleles=args.alleles, debug=True, return_result=True)\n\n    # Leave the user in ipdb to explore results\n    import ipdb  # pylint: disable=import-error\n    ipdb.set_trace()\n"
  },
  {
    "path": "test/test_released_presentation_highscore_rows.py",
    "content": "\"\"\"\nRegression tests for released presentation model predictions.\n\nExpected values cover peptide+flank contexts where at least one allele\nhas a high presentation score (>0.9), including low-scoring alleles for\nthe same contexts.  Values were generated from the TF/Keras implementation\nand are stored under test/data/.\n\"\"\"\nimport os\nimport warnings\n\nimport numpy as np\nimport pandas as pd\n\nfrom mhcflurry import Class1AffinityPredictor, Class1PresentationPredictor\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\nwarnings.filterwarnings(\n    \"ignore\",\n    message=r\".*Downcasting behavior in `replace` is deprecated.*\",\n    category=FutureWarning,\n)\n\nEXPECTED_CSV = \"master_released_class1_presentation_highscore_rows.csv.gz\"\nBASE_COLUMNS = [\"row_id\", \"peptide\", \"allele\", \"n_flank\", \"c_flank\"]\nSTRING_COLUMNS = [\"pres_with_best_allele\", \"pres_without_best_allele\"]\nHIGH_SCORE_COLUMNS = [\n    \"pres_with_presentation_score\",\n    \"pres_without_presentation_score\",\n]\n\n\ndef setup_module():\n    startup()\n\n\ndef teardown_module():\n    cleanup()\n\n\ndef _load_expected():\n    data_dir = os.path.join(os.path.dirname(__file__), \"data\")\n    return pd.read_csv(\n        os.path.join(data_dir, EXPECTED_CSV), keep_default_na=False)\n\n\ndef _atol_for_output(column):\n    if \"percentile\" in column:\n        return 1e-5\n    if \"affinity\" in column:\n        return 0.1\n    return 1e-5\n\n\ndef test_expected_data_has_high_and_low_contexts():\n    \"\"\"Sanity-check that the expected data spans a wide score range.\"\"\"\n    expected_df = _load_expected()\n    by_context = expected_df.groupby(\n        [\"peptide\", \"n_flank\", \"c_flank\"], observed=True)\n    context_max = by_context[HIGH_SCORE_COLUMNS].max()\n    context_min = by_context[HIGH_SCORE_COLUMNS].min()\n\n    assert (\n        (context_max[\"pres_with_presentation_score\"] > 0.9)\n        | (context_max[\"pres_without_presentation_score\"] > 0.9)\n    ).all()\n    assert (context_min[\"pres_with_presentation_score\"] < 0.2).all()\n    assert (context_min[\"pres_without_presentation_score\"] < 0.2).all()\n\n\ndef test_presentation_predictions():\n    expected_df = _load_expected()\n\n    affinity_predictor = Class1AffinityPredictor.load()\n    presentation_predictor = Class1PresentationPredictor.load()\n\n    peptides = expected_df[\"peptide\"].tolist()\n    alleles = expected_df[\"allele\"].tolist()\n    n_flanks = expected_df[\"n_flank\"].tolist()\n    c_flanks = expected_df[\"c_flank\"].tolist()\n\n    with warnings.catch_warnings():\n        warnings.filterwarnings(\n            \"ignore\",\n            message=r\".*Downcasting behavior in `replace` is deprecated.*\",\n            category=FutureWarning,\n        )\n        aff_df = affinity_predictor.predict_to_dataframe(\n            peptides=peptides,\n            alleles=alleles,\n            throw=False,\n            include_percentile_ranks=True,\n            include_confidence_intervals=True,\n            centrality_measure=\"mean\",\n            model_kwargs={\"batch_size\": 4096},\n        )\n    np.testing.assert_array_equal(\n        aff_df[\"peptide\"].to_numpy(), expected_df[\"peptide\"].to_numpy())\n    np.testing.assert_array_equal(\n        aff_df[\"allele\"].to_numpy(), expected_df[\"allele\"].to_numpy())\n\n    sample_names = alleles\n    allele_map = {allele: [allele] for allele in sorted(set(alleles))}\n    with warnings.catch_warnings():\n        warnings.filterwarnings(\n            
\"ignore\",\n            message=r\".*Downcasting behavior in `replace` is deprecated.*\",\n            category=FutureWarning,\n        )\n        pres_with_df = presentation_predictor.predict(\n            peptides=peptides,\n            alleles=allele_map,\n            sample_names=sample_names,\n            n_flanks=n_flanks,\n            c_flanks=c_flanks,\n            include_affinity_percentile=True,\n            verbose=0,\n            throw=True,\n        ).sort_values(\"peptide_num\")\n        pres_without_df = presentation_predictor.predict(\n            peptides=peptides,\n            alleles=allele_map,\n            sample_names=sample_names,\n            n_flanks=None,\n            c_flanks=None,\n            include_affinity_percentile=True,\n            verbose=0,\n            throw=True,\n        ).sort_values(\"peptide_num\")\n\n    predicted = expected_df[BASE_COLUMNS].copy()\n    predicted[\"affinity_prediction\"] = aff_df[\"prediction\"].values\n    predicted[\"affinity_prediction_low\"] = aff_df.get(\"prediction_low\", np.nan)\n    predicted[\"affinity_prediction_high\"] = aff_df.get(\"prediction_high\", np.nan)\n    predicted[\"affinity_prediction_percentile\"] = aff_df.get(\n        \"prediction_percentile\", np.nan)\n\n    predicted[\"pres_with_affinity\"] = pres_with_df[\"affinity\"].values\n    predicted[\"pres_with_best_allele\"] = (\n        pres_with_df[\"best_allele\"].astype(str).values)\n    predicted[\"pres_with_affinity_percentile\"] = (\n        pres_with_df[\"affinity_percentile\"].values)\n    predicted[\"processing_with_score\"] = pres_with_df[\"processing_score\"].values\n    predicted[\"pres_with_processing_score\"] = (\n        pres_with_df[\"processing_score\"].values)\n    predicted[\"pres_with_presentation_score\"] = (\n        pres_with_df[\"presentation_score\"].values)\n    predicted[\"pres_with_presentation_percentile\"] = (\n        pres_with_df[\"presentation_percentile\"].values)\n\n    predicted[\"pres_without_affinity\"] = pres_without_df[\"affinity\"].values\n    predicted[\"pres_without_best_allele\"] = (\n        pres_without_df[\"best_allele\"].astype(str).values)\n    predicted[\"pres_without_affinity_percentile\"] = (\n        pres_without_df[\"affinity_percentile\"].values)\n    predicted[\"processing_without_score\"] = (\n        pres_without_df[\"processing_score\"].values)\n    predicted[\"pres_without_processing_score\"] = (\n        pres_without_df[\"processing_score\"].values)\n    predicted[\"pres_without_presentation_score\"] = (\n        pres_without_df[\"presentation_score\"].values)\n    predicted[\"pres_without_presentation_percentile\"] = (\n        pres_without_df[\"presentation_percentile\"].values)\n\n    for col in STRING_COLUMNS:\n        np.testing.assert_array_equal(\n            predicted[col].astype(str).to_numpy(),\n            expected_df[col].astype(str).to_numpy(),\n        )\n\n    numeric_columns = [\n        c for c in expected_df.columns\n        if c not in BASE_COLUMNS + STRING_COLUMNS\n    ]\n    for col in numeric_columns:\n        np.testing.assert_allclose(\n            predicted[col].to_numpy(dtype=np.float64),\n            expected_df[col].to_numpy(dtype=np.float64),\n            rtol=0.0,\n            atol=_atol_for_output(col),\n        )\n"
  },
  {
    "path": "test/test_selected_peptides_csv.py",
    "content": "\"\"\"\nRegression checks against selected-peptides.csv.\n\nCompares current MHCflurry predictions to values recorded from the\nprevious public release, and checks NetMHCpan affinity is reasonably close.\n\"\"\"\nimport os\n\nimport numpy as np\nimport pandas as pd\nimport pytest\n\nfrom mhcflurry import Class1AffinityPredictor, Class1PresentationPredictor\nfrom mhcflurry.downloads import get_path\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\nDATA_PATH = os.path.join(\n    os.path.dirname(os.path.dirname(__file__)),\n    \"selected-peptides.csv\",\n)\n\n\ndef _normalize_allele(allele):\n    if allele is None or (isinstance(allele, float) and np.isnan(allele)):\n        return allele\n    allele = str(allele).strip()\n    if allele.startswith(\"HLA-\") or \"-\" in allele:\n        return allele\n    if \"*\" in allele:\n        return f\"HLA-{allele}\"\n    return allele\n\n\n@pytest.fixture(scope=\"module\")\ndef selected_peptides_predictions():\n    startup()\n    try:\n        df = pd.read_csv(DATA_PATH)\n        peptides = df[\"mhcflurry_peptide\"].fillna(df[\"peptide\"]).tolist()\n        alleles = [_normalize_allele(a) for a in df[\"mhcflurry_best_allele\"].tolist()]\n\n        sample_names = [f\"row_{i}\" for i in range(len(peptides))]\n        alleles_dict = {name: [allele] for name, allele in zip(sample_names, alleles)}\n\n        predictor = Class1PresentationPredictor.load(\n            get_path(\"models_class1_presentation\", \"models\")\n        )\n        pred_df = predictor.predict(\n            peptides=peptides,\n            alleles=alleles_dict,\n            sample_names=sample_names,\n            n_flanks=None,\n            c_flanks=None,\n            verbose=0,\n        )\n        pred_df = pred_df.sort_values(\"peptide_num\").reset_index(drop=True)\n        df = df.reset_index(drop=True)\n        return df, pred_df\n    finally:\n        cleanup()\n\n\ndef test_selected_peptides_mhcflurry_matches_csv(selected_peptides_predictions):\n    df, pred_df = selected_peptides_predictions\n\n    np.testing.assert_allclose(\n        pred_df[\"affinity\"].values,\n        df[\"mhcflurry_affinity\"].values.astype(float),\n        rtol=0.01,\n        atol=1e-6,\n    )\n    np.testing.assert_allclose(\n        pred_df[\"processing_score\"].values,\n        df[\"mhcflurry_processing_score\"].values.astype(float),\n        rtol=0.01,\n        atol=1e-6,\n    )\n    np.testing.assert_allclose(\n        pred_df[\"presentation_score\"].values,\n        df[\"mhcflurry_presentation_score\"].values.astype(float),\n        rtol=0.01,\n        atol=1e-6,\n    )\n    np.testing.assert_allclose(\n        pred_df[\"presentation_percentile\"].values,\n        df[\"mhcflurry_presentation_percentile\"].values.astype(float),\n        rtol=0.01,\n        atol=0.1,\n    )\n\n\ndef test_selected_peptides_netmhcpan_affinity_close(selected_peptides_predictions):\n    df, _ = selected_peptides_predictions\n    net_alleles = [_normalize_allele(a) for a in df[\"netmhcpan_best_allele_by_pr\"].tolist()]\n    mhc_alleles = [_normalize_allele(a) for a in df[\"mhcflurry_best_allele\"].tolist()]\n\n    mask = [\n        (net.startswith(\"HLA-A\") or net.startswith(\"HLA-B\"))\n        and (mhc.startswith(\"HLA-A\") or mhc.startswith(\"HLA-B\"))\n        for net, mhc in zip(net_alleles, mhc_alleles)\n    ]\n\n    df = df.loc[mask].reset_index(drop=True)\n    peptides = df[\"mhcflurry_peptide\"].fillna(df[\"peptide\"]).tolist()\n    alleles = [net for net, keep in zip(net_alleles, mask) 
if keep]\n\n    startup()\n    try:\n        predictor = Class1AffinityPredictor.load(\n            get_path(\"models_class1\", \"models\")\n        )\n        mhc_aff = predictor.predict(peptides=peptides, alleles=alleles).astype(float)\n    finally:\n        cleanup()\n\n    net_aff = df[\"netmhcpan_aff\"].values.astype(float)\n    mhc_aff = np.clip(mhc_aff, 1e-6, None)\n    net_aff = np.clip(net_aff, 1e-6, None)\n    log_diff = np.abs(np.log10(mhc_aff) - np.log10(net_aff))\n\n    # Within 10x for HLA-A/B when comparing to NetMHCpan's best allele.\n    assert (log_diff <= np.log10(10)).all()\n"
  },
  {
    "path": "test/test_speed.py",
    "content": "\"\"\"\nProfile prediction speed\n\n\"\"\"\n\nimport time\nimport cProfile\nimport pstats\nimport collections\nimport argparse\nimport sys\nimport pytest\n\nimport pandas\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.encodable_sequences import EncodableSequences\nfrom mhcflurry.common import random_peptides\nfrom mhcflurry.downloads import get_path\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\n\nALLELE_SPECIFIC_PREDICTOR = None\nPAN_ALLELE_PREDICTOR = None\n\n\ndef setup_module():\n    global ALLELE_SPECIFIC_PREDICTOR, PAN_ALLELE_PREDICTOR\n    startup()\n    ALLELE_SPECIFIC_PREDICTOR = Class1AffinityPredictor.load(\n        get_path(\"models_class1\", \"models\"))\n\n    PAN_ALLELE_PREDICTOR = Class1AffinityPredictor.load(\n        get_path(\"models_class1_pan\", \"models.combined\"))\n\n\ndef teardown_module():\n    global ALLELE_SPECIFIC_PREDICTOR, PAN_ALLELE_PREDICTOR\n    ALLELE_SPECIFIC_PREDICTOR = None\n    PAN_ALLELE_PREDICTOR = None\n    cleanup()\n\n\nDEFAULT_NUM_PREDICTIONS = 10000\n\n\ndef load_predictors():\n    return {\n        'allele_specific': Class1AffinityPredictor.load(get_path(\"models_class1\", \"models\")),\n        'pan_allele': Class1AffinityPredictor.load(get_path(\"models_class1_pan\", \"models.combined\")),\n    }\n\n\n# Define a fixture to initialize and clean up predictors\n@pytest.fixture(scope=\"module\")\ndef predictors():\n    startup()\n    predictors_dict = load_predictors()\n    yield predictors_dict\n    cleanup()\n\n@pytest.fixture(autouse=True)\ndef init():\n    from . import initialize\n    initialize()\n\ndef test_speed_allele_specific(predictors, profile=False, num=DEFAULT_NUM_PREDICTIONS):\n    starts = collections.OrderedDict()\n    timings = collections.OrderedDict()\n    profilers = collections.OrderedDict()\n\n    predictor = predictors['allele_specific']\n\n    def start(name):\n        starts[name] = time.time()\n        if profile:\n            profilers[name] = cProfile.Profile()\n            profilers[name].enable()\n\n    def end(name):\n        timings[name] = time.time() - starts[name]\n        if profile:\n            profilers[name].disable()\n\n    start(\"first\")\n    predictor.predict([\"SIINFEKL\"], allele=\"HLA-A*02:01\")\n    end(\"first\")\n\n    peptides = random_peptides(num)\n    start(\"pred_%d\" % num)\n    predictor.predict(peptides, allele=\"HLA-A*02:01\")\n    end(\"pred_%d\" % num)\n\n    NUM2 = 10000\n    peptides = EncodableSequences.create(random_peptides(NUM2, length=13))\n    start(\"encode_blosum_%d\" % NUM2)\n    peptides.variable_length_to_fixed_length_vector_encoding(\"BLOSUM62\")\n    end(\"encode_blosum_%d\" % NUM2)\n\n    start(\"pred_already_encoded_%d\" % NUM2)\n    predictor.predict(peptides, allele=\"HLA-A*02:01\")\n    end(\"pred_already_encoded_%d\" % NUM2)\n\n    NUM_REPEATS = 100\n    start(\"pred_already_encoded_%d_%d_times\" % (NUM2, NUM_REPEATS))\n    for _ in range(NUM_REPEATS):\n        predictor.predict(peptides, allele=\"HLA-A*02:01\")\n    end(\"pred_already_encoded_%d_%d_times\" % (NUM2, NUM_REPEATS))\n\n    print(\"SPEED BENCHMARK\")\n    print(\"Results:\\n%s\" % str(pandas.Series(timings)))\n\n    if __name__ == '__main__':\n        # Only return values if run directly from Python, since pytest complains\n        # about return values from tests.\n        return dict(\n            (key, pstats.Stats(value)) for (key, value) in profilers.items())\n\n\ndef test_speed_pan_allele(predictors, profile=False, num=DEFAULT_NUM_PREDICTIONS):\n    
starts = collections.OrderedDict()\n    timings = collections.OrderedDict()\n    profilers = collections.OrderedDict()\n\n    predictor = predictors['pan_allele']\n\n    def start(name):\n        starts[name] = time.time()\n        if profile:\n            profilers[name] = cProfile.Profile()\n            profilers[name].enable()\n\n    def end(name):\n        timings[name] = time.time() - starts[name]\n        if profile:\n            profilers[name].disable()\n\n    start(\"first\")\n    predictor.predict([\"SIINFEKL\"], allele=\"HLA-A*02:01\")\n    end(\"first\")\n\n    peptides = random_peptides(num)\n    start(\"pred_%d\" % num)\n    predictor.predict(peptides, allele=\"HLA-A*02:01\")\n    end(\"pred_%d\" % num)\n\n    print(\"SPEED BENCHMARK\")\n    print(\"Results:\\n%s\" % str(pandas.Series(timings)))\n\n    if __name__ == '__main__':\n        # Only return values if run directly from Python, since pytest complains\n        # about return values from tests.\n        return dict(\n            (key, pstats.Stats(value)) for (key, value) in profilers.items())\n\n\nparser = argparse.ArgumentParser(usage=__doc__)\nparser.add_argument(\n    \"--predictor\",\n    nargs=\"+\",\n    choices=[\"allele-specific\", \"pan-allele\"],\n    default=[\"allele-specific\", \"pan-allele\"],\n    help=\"Which predictors to run\")\n\nparser.add_argument(\n    \"--num-predictions\",\n    type=int,\n    default=DEFAULT_NUM_PREDICTIONS,\n    help=\"Number of predictions to run\")\n\nif __name__ == '__main__':\n    # If run directly from python, do profiling and leave the user in a shell\n    # to explore results.\n\n    args = parser.parse_args(sys.argv[1:])\n    setup_module()\n    predictors_dict = {\n        \"allele_specific\": ALLELE_SPECIFIC_PREDICTOR,\n        \"pan_allele\": PAN_ALLELE_PREDICTOR,\n    }\n\n    if \"allele-specific\" in args.predictor:\n        print(\"Running allele-specific test\")\n        result = test_speed_allele_specific(\n            predictors=predictors_dict,\n            profile=True,\n            num=args.num_predictions)\n        result[\n            \"pred_%d\" % args.num_predictions\n        ].sort_stats(\"cumtime\").reverse_order().print_stats()\n\n    if \"pan-allele\" in args.predictor:\n        print(\"Running pan-allele test\")\n        result = test_speed_pan_allele(\n            predictors=predictors_dict,\n            profile=True,\n            num=args.num_predictions)\n        result[\n            \"pred_%d\" % args.num_predictions\n        ].sort_stats(\"cumtime\").reverse_order().print_stats()\n\n    # Leave in ipython\n    locals().update(result)\n    import ipdb  # pylint: disable=import-error\n    ipdb.set_trace()\n"
  },
  {
    "path": "test/test_train_and_related_commands.py",
    "content": "\"\"\"\nTest train, calibrate percentile ranks, and model selection commands.\n\"\"\"\n\nimport json\nimport os\nimport shutil\nimport tempfile\nimport subprocess\nfrom copy import deepcopy\nimport pytest\n\nfrom numpy.testing import assert_array_less\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.downloads import get_path\nfrom .pytest_helpers import mhcflurry_cli\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\nos.environ[\"CUDA_VISIBLE_DEVICES\"] = \"\"\n\nHYPERPARAMETERS = [\n    {\n        \"n_models\": 2,\n        \"max_epochs\": 500,\n        \"patience\": 5,\n        \"minibatch_size\": 128,\n        \"early_stopping\": True,\n        \"validation_split\": 0.2,\n\n        \"random_negative_rate\": 0.0,\n        \"random_negative_constant\": 25,\n\n        \"peptide_amino_acid_encoding\": \"BLOSUM62\",\n        \"use_embedding\": False,\n        \"kmer_size\": 15,\n        \"batch_normalization\": False,\n        \"locally_connected_layers\": [\n            {\n                \"filters\": 8,\n                \"activation\": \"tanh\",\n                \"kernel_size\": 3\n            }\n        ],\n        \"activation\": \"tanh\",\n        \"output_activation\": \"sigmoid\",\n        \"layer_sizes\": [\n            16\n        ],\n        \"random_negative_affinity_min\": 20000.0,\n        \"random_negative_affinity_max\": 50000.0,\n        \"dense_layer_l1_regularization\": 0.001,\n        \"dropout_probability\": 0.0\n    }\n]\n\n\ndef run_and_check(n_jobs=0):\n    models_dir = tempfile.mkdtemp(prefix=\"mhcflurry-test-models\")\n    hyperparameters_filename = os.path.join(\n        models_dir, \"hyperparameters.yaml\")\n    with open(hyperparameters_filename, \"w\") as fd:\n        json.dump(HYPERPARAMETERS, fd)\n\n    args = mhcflurry_cli(\"mhcflurry-class1-train-allele-specific-models\") + [\n        \"--data\", get_path(\"data_curated\", \"curated_training_data.affinity.csv.bz2\"),\n        \"--hyperparameters\", hyperparameters_filename,\n        \"--allele\", \"HLA-A*02:01\", \"HLA-A*03:01\",\n        \"--out-models-dir\", models_dir,\n        \"--num-jobs\", str(n_jobs),\n    ]\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    # Calibrate percentile ranks\n    args = mhcflurry_cli(\"mhcflurry-calibrate-percentile-ranks\") + [\n        \"--models-dir\", models_dir,\n        \"--num-peptides-per-length\", \"10000\",\n        \"--num-jobs\", str(n_jobs),\n    ]\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    result = Class1AffinityPredictor.load(models_dir)\n    predictions = result.predict(\n        peptides=[\"SLYNTVATL\"],\n        alleles=[\"HLA-A*02:01\"])\n    assert predictions.shape == (1,)\n    assert_array_less(predictions, 1000)\n    df = result.predict_to_dataframe(\n            peptides=[\"SLYNTVATL\"],\n            alleles=[\"HLA-A*02:01\"])\n    print(df)\n    assert \"prediction_percentile\" in df.columns\n\n    print(\"Deleting: %s\" % models_dir)\n    shutil.rmtree(models_dir)\n\n\ndef run_and_check_with_model_selection(n_jobs=1):\n    models_dir1 = tempfile.mkdtemp(prefix=\"mhcflurry-test-models\")\n    hyperparameters_filename = os.path.join(\n        models_dir1, \"hyperparameters.yaml\")\n\n    # Include one architecture that has max_epochs = 0. 
We check that it never\n    # gets selected in model selection.\n    hyperparameters = [\n        deepcopy(HYPERPARAMETERS[0]),\n        deepcopy(HYPERPARAMETERS[0]),\n    ]\n    hyperparameters[-1][\"max_epochs\"] = 0\n    with open(hyperparameters_filename, \"w\") as fd:\n        json.dump(hyperparameters, fd)\n\n    args = mhcflurry_cli(\"mhcflurry-class1-train-allele-specific-models\") + [\n        \"--data\", get_path(\"data_curated\", \"curated_training_data.affinity.csv.bz2\"),\n        \"--hyperparameters\", hyperparameters_filename,\n        \"--allele\", \"HLA-A*02:01\", \"HLA-A*03:01\",\n        \"--out-models-dir\", models_dir1,\n        \"--num-jobs\", str(n_jobs),\n        \"--held-out-fraction-reciprocal\", \"10\",\n        \"--n-models\", \"1\",\n    ]\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    result = Class1AffinityPredictor.load(models_dir1)\n    assert len(result.neural_networks) == 4\n\n    models_dir2 = tempfile.mkdtemp(prefix=\"mhcflurry-test-models\")\n    args = mhcflurry_cli(\"mhcflurry-class1-select-allele-specific-models\") + [\n        \"--data\",\n        get_path(\"data_curated\", \"curated_training_data.affinity.csv.bz2\"),\n        \"--exclude-data\", models_dir1 + \"/train_data.csv.bz2\",\n        \"--out-models-dir\", models_dir2,\n        \"--models-dir\", models_dir1,\n        \"--num-jobs\", str(n_jobs),\n        \"--mse-max-models\", \"1\",\n        \"--unselected-accuracy-scorer\", \"combined:mass-spec,mse\",\n        \"--unselected-accuracy-percentile-threshold\", \"95\",\n    ]\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    result = Class1AffinityPredictor.load(models_dir2)\n    assert len(result.neural_networks) == 2\n    assert (\n        len(result.allele_to_allele_specific_models[\"HLA-A*02:01\"]) == 1)\n    assert (\n        len(result.allele_to_allele_specific_models[\"HLA-A*03:01\"]) == 1)\n    assert (\n        result.allele_to_allele_specific_models[\"HLA-A*02:01\"][0].hyperparameters[\"max_epochs\"] == 500)\n    assert (\n        result.allele_to_allele_specific_models[\"HLA-A*03:01\"][\n            0].hyperparameters[\"max_epochs\"] == 500)\n\n    print(\"Deleting: %s\" % models_dir1)\n    print(\"Deleting: %s\" % models_dir2)\n    shutil.rmtree(models_dir1)\n\n\ndef test_run_parallel():\n    run_and_check(n_jobs=2)\n    run_and_check_with_model_selection(n_jobs=2)\n\n\ndef test_run_serial():\n    run_and_check(n_jobs=0)\n    run_and_check_with_model_selection(n_jobs=0)\n"
  },
  {
    "path": "test/test_train_pan_allele_models_command.py",
    "content": "\"\"\"\nTests for training and predicting using Class1 pan-allele models.\n\"\"\"\n\nimport json\nimport os\nimport shutil\nimport tempfile\nimport subprocess\nimport sys\n\nimport pandas\nimport pytest\n\nfrom numpy.testing import assert_array_less\n\nfrom mhcflurry import Class1AffinityPredictor\nfrom mhcflurry.downloads import get_path\nfrom .pytest_helpers import mhcflurry_cli\n\nfrom mhcflurry.testing_utils import cleanup, startup\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\nos.environ[\"CUDA_VISIBLE_DEVICES\"] = \"\"\nos.environ[\"MHCFLURRY_CLUSTER_WORKER_COMMAND\"] = (\n    f\"{sys.executable} -m mhcflurry.cluster_worker_entry_point\"\n)\n\n\nHYPERPARAMETERS_LIST = [\n{\n    'activation': 'tanh',\n    'allele_dense_layer_sizes': [],\n    'batch_normalization': False,\n    'dense_layer_l1_regularization': 0.0,\n    'dense_layer_l2_regularization': 0.0,\n    'dropout_probability': 0.5,\n    'early_stopping': True,\n    'init': 'glorot_uniform',\n    'layer_sizes': [64],\n    'learning_rate': None,\n    'locally_connected_layers': [],\n    'loss': 'custom:mse_with_inequalities',\n    'max_epochs': 0,  # never selected\n    'minibatch_size': 256,\n    'optimizer': 'rmsprop',\n    'output_activation': 'sigmoid',\n    'patience': 10,\n    'peptide_allele_merge_activation': '',\n    'peptide_allele_merge_method': 'concatenate',\n    'peptide_amino_acid_encoding': 'BLOSUM62',\n    'peptide_dense_layer_sizes': [],\n    'peptide_encoding': {\n        'alignment_method': 'left_pad_centered_right_pad',\n        'max_length': 15,\n        'vector_encoding_name': 'BLOSUM62',\n    },\n    'random_negative_affinity_max': 50000.0,\n    'random_negative_affinity_min': 20000.0,\n    'random_negative_constant': 25,\n    'random_negative_distribution_smoothing': 0.0,\n    'random_negative_match_distribution': True,\n    'random_negative_rate': 0.2,\n    'train_data': {\"pretrain\": False},\n    'validation_split': 0.1,\n    'data_dependent_initialization_method': \"lsuv\",\n},\n{\n    'activation': 'tanh',\n    'allele_dense_layer_sizes': [],\n    'batch_normalization': False,\n    'dense_layer_l1_regularization': 0.0,\n    'dense_layer_l2_regularization': 0.0,\n    'dropout_probability': 0.5,\n    'early_stopping': True,\n    'init': 'glorot_uniform',\n    'layer_sizes': [32],\n    'learning_rate': None,\n    'locally_connected_layers': [],\n    'loss': 'custom:mse_with_inequalities',\n    'max_epochs': 5,\n    'minibatch_size': 256,\n    'optimizer': 'rmsprop',\n    'output_activation': 'sigmoid',\n    'patience': 5,\n    'peptide_allele_merge_activation': '',\n    'peptide_allele_merge_method': 'concatenate',\n    'peptide_amino_acid_encoding': 'BLOSUM62',\n    'peptide_dense_layer_sizes': [],\n    'peptide_encoding': {\n        'alignment_method': 'left_pad_centered_right_pad',\n        'max_length': 15,\n        'vector_encoding_name': 'BLOSUM62',\n    },\n    'random_negative_affinity_max': 50000.0,\n    'random_negative_affinity_min': 20000.0,\n    'random_negative_constant': 25,\n    'random_negative_distribution_smoothing': 0.0,\n    'random_negative_match_distribution': True,\n    'random_negative_rate': 0.2,\n    'train_data': {\n        \"pretrain\": True,\n        'pretrain_peptides_per_step': 4,\n        'pretrain_max_epochs': 2,\n        'pretrain_max_val_loss': 0.2,\n    },\n    'validation_split': 0.1,\n},\n]\n\nPRETRAIN_DATA = 
\"\"\"\n,BoLA-6*13:01,Eqca-1*01:01,H-2-Db,H-2-Dd,H-2-Kb,H-2-Kd,H-2-Kk,H-2-Ld,HLA-A*01:01,HLA-A*02:01,HLA-A*02:02,HLA-A*02:03,HLA-A*02:05,HLA-A*02:06,HLA-A*02:07,HLA-A*02:11,HLA-A*02:12,HLA-A*02:16,HLA-A*02:17,HLA-A*02:19,HLA-A*02:50,HLA-A*03:01,HLA-A*11:01,HLA-A*23:01,HLA-A*24:02,HLA-A*24:03,HLA-A*25:01,HLA-A*26:01,HLA-A*26:02,HLA-A*26:03,HLA-A*29:02,HLA-A*30:01,HLA-A*30:02,HLA-A*31:01,HLA-A*32:01,HLA-A*33:01,HLA-A*66:01,HLA-A*68:01,HLA-A*68:02,HLA-A*68:23,HLA-A*69:01,HLA-A*80:01,HLA-B*07:01,HLA-B*07:02,HLA-B*08:01,HLA-B*08:02,HLA-B*08:03,HLA-B*14:02,HLA-B*15:01,HLA-B*15:02,HLA-B*15:03,HLA-B*15:09,HLA-B*15:17,HLA-B*18:01,HLA-B*27:03,HLA-B*27:05,HLA-B*35:01,HLA-B*35:03,HLA-B*38:01,HLA-B*39:01,HLA-B*40:01,HLA-B*40:02,HLA-B*42:01,HLA-B*44:02,HLA-B*44:03,HLA-B*45:01,HLA-B*46:01,HLA-B*48:01,HLA-B*51:01,HLA-B*53:01,HLA-B*54:01,HLA-B*57:01,HLA-B*58:01,HLA-B*83:01,HLA-C*03:03,HLA-C*05:01,HLA-C*06:02,HLA-C*07:02,HLA-C*12:03,HLA-C*15:02,Mamu-A*01:01,Mamu-A*02:01,Mamu-A*02:0102,Mamu-A*07:01,Mamu-A*11:01,Mamu-A*22:01,Mamu-A*26:01,Mamu-B*01:01,Mamu-B*03:01,Mamu-B*08:01,Mamu-B*10:01,Mamu-B*17:01,Mamu-B*17:04,Mamu-B*39:01,Mamu-B*52:01,Mamu-B*83:01,Patr-A*01:01,Patr-A*04:01,Patr-A*07:01,Patr-A*09:01,Patr-B*01:01,Patr-B*13:01,Patr-B*24:01\nVCCIYWDISYCTCQ,44182.5,42134.1,44152.5,42887.9,30441.3,38592.9,41962.6,42822.6,25616.4,22063.8,35987.9,36692.9,40859.1,41072.7,38672.0,36913.2,40334.9,36647.9,36738.3,43307.7,40456.6,29511.6,31662.5,42630.7,40073.5,42494.2,36130.4,39625.4,39909.6,38751.8,39681.3,36767.7,36100.5,29302.9,46286.3,26136.6,37672.1,34302.1,37976.5,37864.7,31506.7,32281.1,40357.5,35316.3,29220.1,35346.3,34818.9,41566.6,31951.6,38975.1,40825.3,44917.4,43148.3,34119.6,37508.6,37930.1,29881.9,42463.0,39552.8,36529.9,36587.0,35603.6,34376.4,37498.8,42253.7,37676.9,30077.2,39384.4,33218.6,36576.7,38777.8,29806.4,28986.1,34746.3,39748.3,38600.2,40338.9,39364.9,41546.0,39574.0,42059.6,46195.4,41975.4,44810.3,44629.7,42749.5,43288.1,46931.2,39014.4,39893.3,41316.9,41923.5,41918.2,42723.5,38612.0,39772.1,42175.5,38879.7,41908.9,38287.3,41522.6,39666.8,43396.1\nCQFVANRCHQKVFRL,39146.5,39452.0,9484.7,39116.9,13002.1,27669.4,30010.9,37655.0,31624.9,12478.3,28164.5,30813.8,39656.3,24968.0,33852.1,30953.1,31049.5,32169.5,33800.9,36730.1,37233.0,17696.0,31413.6,26134.9,31618.0,40342.3,35511.9,38680.3,38605.3,37388.9,38012.0,36161.2,36449.0,23276.0,39330.3,38518.9,37164.7,36824.9,32292.1,38968.4,31223.9,31704.9,35919.0,32976.9,28484.1,33958.6,32564.3,37876.1,24948.2,36401.9,34953.6,36509.9,39256.5,28768.4,24109.5,10714.6,40389.5,43803.1,32800.9,26891.8,27654.3,23972.8,35700.0,32613.3,26999.3,28714.6,29744.2,33689.4,34232.0,32967.2,37510.6,32342.9,30111.8,30403.3,38445.6,38237.8,37861.1,37985.3,39969.8,39538.2,38738.5,42978.4,39435.6,41359.5,16024.0,39753.9,41649.9,39454.1,32748.2,27406.8,38950.9,30001.9,27833.5,32655.5,27208.1,37915.8,39650.7,37642.0,37611.6,32313.3,36946.5,33275.5,39516.1\nYNWDWAQCSGI,35747.3,29140.6,30472.7,33365.7,1476.4,4321.6,10641.6,32903.8,20380.9,4780.3,12919.5,17551.4,22162.1,13392.5,16314.1,7991.3,23261.7,6525.2,15695.2,27362.5,24716.2,28711.7,30898.9,23808.7,25642.9,39880.0,30787.6,33162.7,33052.3,33146.9,21397.6,36392.2,26063.9,28349.8,40062.1,26596.2,32116.4,31183.4,12831.5,35000.6,17873.4,30601.4,30336.8,24523.8,18060.1,28888.9,26060.1,34612.6,14003.0,32129.0,31080.5,35535.7,32822.4,24605.3,20247.7,8311.8,31112.8,40916.8,29408.5,29217.6,31888.1,25260.8,27729.2,28121.0,36758.1,32991.8,25414.3,32947.6,6063.8,33594.4,20760.6,28579.3,25034.7,23129.8,35563.0,30887.8,31750.0,27229.8,337
31.0,32884.0,5407.3,4834.1,35326.9,25099.6,3516.2,32488.6,32741.2,40440.2,20202.0,21406.5,31991.4,22185.6,24853.2,27460.7,5335.4,15766.5,34520.1,33168.8,32759.1,10026.4,3042.1,27291.3,17873.3\nAPEPVMMQGCDN,44794.5,42064.7,36712.6,42567.1,31312.9,25512.2,38772.9,32376.8,31503.8,28373.3,45393.5,41842.1,43223.5,45234.2,39579.4,40729.5,41441.1,38673.3,37791.6,43381.7,42993.3,34470.7,37888.9,43650.8,41233.2,42212.6,34542.5,37432.8,37800.1,36891.9,40060.0,40205.7,35137.4,33173.1,46841.4,41392.7,36465.8,37628.0,41854.1,39959.8,30890.1,32541.7,34548.9,20953.5,34911.7,37295.7,35305.7,37963.4,35794.0,39768.6,41915.4,44501.8,43077.3,34135.5,36039.6,36968.8,30841.4,40495.2,36776.4,34237.8,33980.3,34729.3,36210.8,34585.1,41600.5,36181.2,28528.2,38973.9,32572.4,37361.4,35637.2,31359.7,31602.5,24366.2,39960.9,36509.1,43038.1,41238.5,41575.8,37826.4,39195.7,45378.3,42078.6,44831.9,41156.4,39194.0,44144.7,46339.5,39007.9,40269.1,40479.5,41508.5,41613.3,42798.9,38927.9,37961.6,43384.7,40127.5,43129.2,38632.1,42726.0,31364.8,43992.3\nSIQNDHQFCNE,41247.1,36778.2,31563.6,39917.4,10018.5,26242.4,31501.4,36002.1,28366.2,23209.0,28116.8,27058.3,33981.8,23981.3,33667.0,30816.8,36286.8,30787.3,33485.0,40625.7,32634.4,11132.3,2621.0,37879.7,31918.2,41386.8,30477.2,29618.2,30679.9,31896.3,36876.3,29016.4,21555.8,16172.6,35160.7,22537.3,32996.3,21159.4,27137.4,29441.1,29994.0,32416.6,33022.6,31767.9,19129.5,25393.9,30423.1,37177.4,17995.5,32992.6,33549.6,39935.4,43477.4,33433.2,34472.0,34017.7,41050.6,44500.3,37744.9,35526.9,34872.2,33849.1,26222.6,32226.4,42709.1,36014.3,26019.6,38845.3,32771.8,37961.9,40955.2,21375.2,28595.7,30962.6,38296.9,37179.4,40323.2,38384.1,39617.4,38508.9,33177.7,25294.9,37478.4,42231.1,30983.3,33685.5,39692.9,45876.2,32965.1,34191.3,33783.1,36999.0,37443.1,38511.6,21638.2,31981.6,35943.1,30735.1,37418.3,19187.4,35391.0,36495.6,37896.1\nWAVYMCISAPL,26021.0,16088.8,5262.2,11881.8,3628.8,10658.3,7398.9,18827.6,24553.0,6062.3,3725.9,6626.5,12806.4,1919.8,21857.7,13408.4,16709.7,11225.4,21476.2,33110.7,10213.0,20997.3,25916.5,17285.9,14772.3,39728.6,14817.4,13098.4,17583.7,18495.2,8523.0,24610.3,16200.6,12117.8,29147.3,12013.4,21713.9,5327.3,340.7,5501.6,10100.1,31368.4,13169.4,2241.8,1718.9,9124.3,13725.7,15124.6,2411.5,15438.8,6023.0,13412.1,11652.5,16573.2,10559.6,4402.9,1293.3,19356.7,5939.7,4715.8,12976.0,10061.8,8468.5,8280.0,17922.2,10362.6,6628.0,11632.8,8235.4,9815.9,4647.5,14010.5,7109.3,5989.7,4340.4,5718.2,22291.9,12581.6,19864.1,6913.5,2876.7,249.7,23174.7,7008.9,3938.4,15144.1,22744.4,14198.8,9360.4,6219.5,16554.9,16622.6,13488.7,13740.1,5481.3,11074.1,21412.3,23009.7,23098.3,7874.1,3966.5,11364.2,20621.6\nSSTFMWVLHCHKNG,44865.8,42298.8,38909.9,42717.9,25716.2,39557.0,40608.0,43890.1,30252.2,15498.1,40268.4,35903.4,41637.4,43457.6,35753.7,34556.2,41097.6,32160.7,34584.8,43866.0,41238.7,26830.0,28742.7,42919.2,39493.0,42250.6,34514.1,37210.7,37497.0,36667.9,38414.4,31386.2,35609.3,27734.9,44761.2,32154.9,36404.8,21535.6,33460.4,34847.3,31448.6,33166.9,39703.3,37050.6,33174.4,36349.1,35093.6,40923.2,30340.5,38489.6,39269.6,43277.0,40119.8,34672.8,37568.4,38813.7,42587.8,45513.4,39465.4,36470.4,36334.7,34517.7,36501.0,36759.8,39608.8,37140.1,28997.9,39201.9,34168.3,40416.3,31511.1,23702.6,24459.9,37410.4,40441.5,39727.9,43281.4,42200.1,41301.8,41022.1,38716.0,38079.6,41414.7,44625.9,44299.1,42349.6,44097.4,46688.8,40452.1,40547.5,40968.6,41460.3,41161.2,41760.6,36773.2,37735.2,43469.9,41742.1,42940.2,39345.1,40750.3,38906.1,43829.8\nNDYRIIHVH,29750.7,7903.2,39663.4,26269.3,26520.4,1774
1.9,6530.7,23519.5,16783.4,24907.1,38796.0,34045.0,37236.8,32764.2,33910.4,31876.9,36690.2,26640.4,34771.6,37988.3,38108.1,22606.4,22956.5,33713.4,23577.7,33039.6,28233.4,29657.2,20610.8,22948.6,22252.9,18901.8,9758.7,12195.3,32876.3,13544.5,27628.0,11451.6,26564.7,17732.1,22462.4,12031.0,28807.3,28633.4,20666.2,28277.9,23257.5,25003.9,11971.8,23476.4,17776.0,38533.8,33476.2,984.9,22771.0,23104.9,15921.4,31338.4,27375.9,25519.2,21732.5,10818.2,26628.9,18322.9,12300.8,17540.1,22481.6,25575.0,31230.6,28830.2,23186.7,22908.0,22448.4,12387.8,35440.0,29550.4,22569.4,22414.0,34456.6,27987.0,33094.3,26622.5,21300.3,29112.5,17602.3,17604.6,26471.7,14319.8,26587.4,24828.2,23827.5,38974.0,37958.0,27690.5,24379.9,14117.3,26238.3,16518.9,32692.1,23436.5,29298.5,21037.6,9746.2\nNEVLWRNRILEIIN,43661.7,42466.3,41698.3,43109.4,35534.9,38811.3,33366.6,43465.4,29895.3,32658.2,42471.5,39083.2,42304.8,41013.3,40566.9,39243.5,41674.1,38714.2,38754.7,43677.4,42242.6,33804.1,37618.9,43068.1,39876.2,42639.4,35534.9,38686.6,39092.5,37827.9,41863.4,39871.7,40872.5,34461.3,46553.1,37443.4,36962.9,32868.0,42030.3,38619.2,31571.3,33145.8,40389.2,35203.0,32552.2,37443.5,36114.8,40695.8,33422.3,38902.1,40563.2,44825.5,44491.1,21154.4,37869.9,38601.7,41773.6,44984.0,39218.1,35042.4,28128.9,27476.4,37155.8,31523.1,30333.4,30040.7,30515.9,34905.5,36266.3,41366.6,41046.5,34295.9,31764.1,30961.9,40911.5,41612.8,43358.8,42536.8,41822.6,42428.9,45258.0,47372.9,39653.2,44025.0,39708.5,41722.2,44182.7,47429.4,39704.9,40816.4,40724.4,43114.5,44056.6,43817.1,40210.9,40351.9,43651.0,41203.5,43747.3,40571.0,43173.7,40487.6,43099.8\nTDPNYHTHFSVT,44066.4,36674.1,32271.2,38109.4,21052.2,36108.5,36135.4,41980.7,30404.2,22295.5,41215.5,34035.9,41445.4,39809.0,36623.4,37166.7,39817.9,34899.7,34782.4,42846.6,41618.2,35189.1,37225.1,41790.5,39337.7,41729.1,35779.5,38304.8,38869.4,36969.1,41858.0,39576.0,39532.8,35183.1,45296.5,40505.4,36365.8,35874.3,36030.8,39184.3,30853.7,32919.7,38420.4,33262.2,27467.2,33073.5,34039.6,41103.6,32110.1,38958.8,39464.1,42709.4,44457.7,34432.4,33556.3,33590.2,40714.1,44637.0,38843.7,36048.2,28612.5,26040.4,30697.6,36461.0,39668.4,35404.3,29492.7,33964.7,32276.1,43412.5,29774.3,32512.1,32916.2,31862.0,39917.3,39059.0,42346.8,41351.6,40547.8,39095.9,33910.1,45607.0,41771.1,43517.8,41057.9,41285.8,43851.4,46378.9,40046.9,39228.6,39703.3,41008.7,41230.3,41780.3,38014.6,40518.1,43804.4,41473.9,42605.4,37678.0,39528.8,37422.5,39159.1\nHQWFQCVAMQSY,27487.0,30700.5,37273.9,35376.0,29078.2,27724.0,20597.0,30480.4,17310.7,16021.7,37523.0,33961.1,40969.4,19968.6,34875.0,31825.6,37331.1,31175.8,29412.7,41535.5,40642.4,12424.8,23533.8,35308.2,33245.6,39775.3,22472.1,17719.2,19849.4,24844.8,7505.9,31364.7,4307.2,16799.1,27251.2,27379.8,25184.4,23448.3,35342.1,32138.7,29292.0,19190.3,30915.6,30562.0,31545.7,35961.0,30709.6,30188.3,499.8,14875.3,5731.1,34921.0,31566.9,5655.9,8402.1,1542.5,14847.3,32801.6,25414.0,23775.9,29195.9,18812.6,35386.2,4887.0,7298.3,19041.1,13830.1,30353.9,34462.2,23459.1,27710.9,18963.8,23748.8,14531.7,35047.3,36914.8,25988.6,29764.9,29475.3,39036.2,39794.4,29819.9,26826.0,32217.7,10013.9,26916.3,34912.0,37665.9,24758.7,17295.9,28531.8,24764.5,16139.5,30351.5,20914.8,27508.2,32628.3,28530.1,26332.1,16659.9,32297.3,30264.2,32507.1\nIWQVYIQCGTEM,41061.7,37961.5,25302.7,38762.8,17765.9,10623.3,31859.8,31245.1,31023.4,25442.0,30489.9,35963.1,37256.7,35425.5,37360.4,37226.3,37086.9,36788.1,35657.8,41540.2,35619.8,32962.4,37114.7,20325.1,20101.6,32000.1,32835.6,34734.3,34929.6,33865.9,29462.0,35562.1,2692
5.2,30392.0,43469.1,35884.5,34001.2,36984.6,33796.5,39019.3,29735.5,32695.2,33217.4,16898.4,22006.2,33778.4,34261.2,38876.7,12971.4,29024.0,24142.0,39208.5,36860.8,23883.6,30132.9,22085.8,22174.9,36579.1,34827.9,33265.2,26867.1,27697.8,30006.4,26056.1,35488.9,31119.9,22043.6,33850.8,22722.1,23271.5,39360.0,25160.2,26966.6,22818.3,34267.7,30985.1,30357.2,27903.9,30255.5,30990.4,28298.9,30412.8,38456.8,39932.5,35373.0,34095.3,39746.8,44862.0,21759.5,21611.7,35509.8,40609.4,40356.5,36262.1,30415.4,38309.5,40677.9,32454.1,34949.9,8240.3,38245.3,27499.7,38969.8\nWNPMPYADKDN,43793.1,41967.1,40595.5,38058.9,26509.5,29836.7,35333.9,43560.0,31045.3,24804.1,41242.6,39742.3,40936.8,42471.1,39179.9,38693.2,41403.3,37812.8,38097.8,43532.7,40502.2,34092.6,36870.7,42033.7,29201.6,41949.2,32912.4,35608.2,36362.6,32689.5,35304.0,39432.3,27060.9,33299.5,46315.2,38181.4,35417.9,33307.1,39797.7,37844.6,31558.8,32729.9,38648.1,34952.4,31312.2,34866.6,35333.3,39596.8,24432.7,37399.1,34958.8,36498.7,43456.6,37726.4,30345.3,29272.5,40734.0,44974.5,37794.7,36123.6,34346.3,31343.8,35849.8,37541.7,43371.3,38712.9,27773.2,38014.3,34428.3,43181.5,43353.8,25804.8,30897.2,38150.3,39755.9,39856.5,42141.5,40976.5,40968.5,41109.5,21640.6,36394.9,38594.2,43406.0,41654.3,40884.6,43121.2,46926.9,33685.5,38138.6,39382.6,36892.5,38091.4,41887.0,30276.8,30591.1,39255.5,39942.9,42992.5,34800.4,35997.4,39474.3,42450.7\nDYLTMYNLAGHYMF,41216.3,39319.5,37516.7,38946.9,24901.2,18489.0,36248.8,41018.7,29045.8,30236.2,40621.7,40522.9,42015.7,40969.5,38931.0,38020.7,41315.7,36615.3,36461.1,43425.3,42086.8,32686.2,34084.9,4938.4,5634.0,22583.4,31068.1,33487.8,34530.0,34429.4,14893.3,38319.0,33635.5,29912.6,38086.4,18012.7,34384.5,28858.3,36132.1,31816.5,29143.5,30528.5,37580.3,35468.6,23500.5,33626.4,31227.5,37015.5,30141.8,37652.6,36980.8,41228.8,37043.9,11156.5,34073.5,32101.7,21944.1,35367.7,32575.7,31466.1,33729.3,30785.8,33940.0,34077.9,32036.2,36565.2,27349.4,37480.3,30117.8,34138.7,38034.4,28652.9,30980.1,31970.7,38401.2,39857.3,39077.2,35957.9,36971.2,40692.9,38297.0,42915.1,38921.3,38533.3,41037.2,38777.0,40785.5,42972.7,39144.8,37696.9,39104.8,37455.3,37600.0,40946.1,32211.7,37757.2,39846.1,37276.1,31433.1,26746.3,39308.8,32511.1,41919.2\nCPGSCSNVEWFTSA,43052.5,41000.2,37276.6,40353.2,30839.2,33841.4,39836.6,37541.9,26747.9,19091.0,40729.1,37009.5,41693.5,38949.3,36880.0,36074.5,39750.1,34468.0,36268.0,41459.3,41595.7,31913.6,36962.6,42242.1,38673.8,41821.9,35358.2,38802.6,38705.1,38263.5,41344.8,39993.5,35732.9,34531.4,45588.8,40921.0,36880.4,38059.4,36557.1,39666.9,29659.7,32871.4,33048.7,13785.4,11061.9,25479.4,26941.0,40189.6,33952.7,38843.5,39758.0,44166.2,44126.5,32801.7,37107.9,36678.8,18676.0,36214.4,38034.0,34374.1,35037.7,33855.5,18579.0,37470.3,40714.5,26607.2,29370.0,38876.6,30730.1,26538.1,5463.4,33028.9,32508.0,28632.8,40303.0,39843.1,42588.1,41675.9,41191.6,40928.5,43912.4,47001.9,42041.0,44321.2,43281.3,37725.2,43718.0,46322.0,41703.2,40945.5,40231.8,42291.5,41186.2,38908.2,37563.7,41088.1,40616.9,39180.0,39582.2,37770.9,41653.1,32565.9,43315.5\nTWLEAGSCNKFWCHY,43256.5,39229.2,31846.7,41249.1,22922.0,35472.1,37847.5,39274.3,16725.9,27082.6,43111.9,39658.3,42651.4,38557.6,39526.6,38566.8,40368.5,37618.1,37232.6,43056.4,42567.0,19631.5,29886.2,16257.7,27592.6,35667.9,34360.5,34792.0,36311.3,35826.1,8614.4,36054.8,17277.1,27552.0,41202.4,32120.8,35727.7,35292.4,40278.2,37839.7,31664.9,26671.4,39115.9,37688.0,34587.6,36326.4,36058.9,41403.8,16078.0,31715.7,31813.5,45128.2,40035.7,22478.7,35077.4,31707.1,28745.7,39056.
4,39072.2,36471.8,36018.2,34942.6,37269.5,32201.6,29156.5,34952.9,29944.8,40025.6,34986.7,32836.4,38958.8,30948.0,30916.8,30316.3,40416.7,40818.4,32363.8,34988.8,37628.4,41676.8,43469.2,44015.7,41286.8,45040.6,42839.7,37323.5,42974.6,44799.3,39990.6,40266.6,39825.9,40864.0,41757.8,29762.6,32694.0,26427.3,41433.5,31461.3,37996.6,20136.4,41865.3,38221.3,43083.3\nEEATDSRNYMRRL,43309.9,39975.1,36980.8,40770.0,26845.4,31663.4,18520.3,40400.6,32317.1,28377.1,38169.4,35509.3,39890.3,41532.5,38309.8,38115.8,40846.9,36438.1,37093.9,42930.5,39579.2,34478.8,37658.8,40705.5,38297.7,41800.1,33067.9,33014.0,32985.0,34258.1,41326.6,38227.3,39134.9,32951.4,42209.4,41582.2,33913.4,37663.4,33216.3,38646.4,30554.2,33276.9,33973.5,30058.9,25584.4,33144.6,32602.9,38320.0,33672.5,39174.1,40919.7,42064.5,40530.9,28407.8,33487.5,32993.4,42157.0,44495.1,34922.9,31182.7,21642.8,18956.3,29151.9,23436.5,17335.5,28156.7,30184.3,19678.7,31271.4,39334.2,41414.2,31418.8,31800.9,16677.8,38726.9,35946.1,37806.4,36625.3,40075.2,36961.7,37460.0,40782.8,38173.0,42921.7,31447.5,38095.7,42302.2,46969.4,34483.4,35123.9,35823.2,43203.7,44314.5,30994.0,34785.4,35043.5,42765.9,39533.0,35406.3,35756.6,39472.2,30912.6,39842.7\nMSGDACND,39531.0,39404.3,42689.8,36631.0,24512.5,31873.8,22499.5,35348.8,27067.7,27959.4,36059.9,36380.6,40222.8,39885.0,35657.4,36346.6,37150.1,33696.8,35618.9,39815.2,37296.2,30732.1,30971.3,38186.1,33521.8,38861.9,32692.8,37045.0,37182.4,35007.3,35006.2,30627.5,21765.5,28733.2,38896.4,31030.8,34166.9,25154.9,37154.9,33131.6,26235.8,28117.1,32986.3,35470.5,26935.0,28876.1,27638.4,38167.8,21090.3,31994.7,32821.9,38174.9,26573.1,31407.1,35055.4,35544.4,35526.5,41595.8,36912.0,35058.5,32903.2,31917.8,28423.1,34514.3,39786.8,33805.7,23372.3,35819.7,25962.6,35199.4,35434.9,20873.3,10865.1,30611.6,26672.0,16901.8,32916.3,33854.9,34604.6,15853.9,22614.9,36981.8,36604.8,42344.9,37295.1,38385.0,39678.6,45767.9,36136.8,38201.6,35585.8,29834.5,26730.9,29939.4,17543.7,24477.3,36685.4,33534.9,40217.3,34840.3,25639.9,33476.0,38813.7\n\"\"\".strip()\n\ndef run_and_check(n_jobs=0, delete=True, additional_args=[]):\n    models_dir = tempfile.mkdtemp(prefix=\"mhcflurry-test-models\")\n    hyperparameters_filename = os.path.join(\n        models_dir, \"hyperparameters.yaml\")\n    with open(hyperparameters_filename, \"w\") as fd:\n        json.dump(HYPERPARAMETERS_LIST, fd)\n\n    pretrain_data_filename = os.path.join(\n        models_dir, \"pretrain_data.csv\")\n    with open(pretrain_data_filename, \"w\") as fd:\n        fd.write(PRETRAIN_DATA)\n        fd.write(\"\\n\")\n\n    data_df = pandas.read_csv(\n        get_path(\"data_curated\", \"curated_training_data.affinity.csv.bz2\"))\n    selected_data_df = data_df.sample(frac=0.1)\n    selected_data_df.to_csv(\n        os.path.join(models_dir, \"_train_data.csv\"), index=False)\n\n    args = mhcflurry_cli(\"mhcflurry-class1-train-pan-allele-models\") + [\n        \"--data\", os.path.join(models_dir, \"_train_data.csv\"),\n        \"--allele-sequences\", get_path(\"allele_sequences\", \"allele_sequences.csv\"),\n        \"--pretrain-data\", pretrain_data_filename,\n        \"--hyperparameters\", hyperparameters_filename,\n        \"--out-models-dir\", models_dir,\n        \"--num-jobs\", str(n_jobs),\n        \"--num-folds\", \"2\",\n        \"--verbosity\", \"1\",\n    ] + additional_args\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    # Run model selection\n    models_dir_selected = tempfile.mkdtemp(\n        
prefix=\"mhcflurry-test-models-selected\")\n    args = mhcflurry_cli(\"mhcflurry-class1-select-pan-allele-models\") + [\n        \"--data\", os.path.join(models_dir, \"train_data.csv.bz2\"),\n        \"--models-dir\", models_dir,\n        \"--out-models-dir\", models_dir_selected,\n        \"--max-models\", \"1\",\n        \"--num-jobs\", str(n_jobs),\n    ] + additional_args\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    result = Class1AffinityPredictor.load(\n        models_dir_selected, optimization_level=0)\n    assert len(result.neural_networks) == 2\n    predictions = result.predict(peptides=[\"SLYNTVATL\"],\n        alleles=[\"HLA-A*02:01\"])\n    assert predictions.shape == (1,)\n    assert_array_less(predictions, 2000)\n\n    if delete:\n        print(\"Deleting: %s\" % models_dir)\n        shutil.rmtree(models_dir)\n        shutil.rmtree(models_dir_selected)\n\n\ndef test_run_parallel():\n    run_and_check(n_jobs=1)\n    run_and_check(n_jobs=2)\n\n\ndef test_run_serial():\n    run_and_check(n_jobs=0)\n\n\ndef test_run_cluster_parallelism():\n    run_and_check(n_jobs=0, additional_args=[\n        '--cluster-parallelism',\n        '--cluster-results-workdir', '/tmp/'\n    ])\n\n\nif __name__ == \"__main__\":\n    # run_and_check(n_jobs=0, delete=False)\n    test_run_cluster_parallelism()\n"
  },
  {
    "path": "test/test_train_processing_models_command.py",
    "content": "\"\"\"\nTest processing train and model selection commands.\n\"\"\"\n\nimport json\nimport os\nimport shutil\nimport tempfile\nimport subprocess\nimport re\nimport pytest\n\nfrom sklearn.metrics import roc_auc_score\nimport pandas\n\nfrom mhcflurry.class1_processing_predictor import Class1ProcessingPredictor\nfrom mhcflurry.common import random_peptides\n\nfrom mhcflurry.testing_utils import cleanup, startup\nfrom .pytest_helpers import mhcflurry_cli\n\n\npytest.fixture(autouse=True, scope=\"module\")\ndef setup_module():\n    startup()\n    yield\n    cleanup()\n\nos.environ[\"CUDA_VISIBLE_DEVICES\"] = \"\"\n\nHYPERPARAMETERS = [\n    {\n        \"max_epochs\": 100,\n        \"n_flank_length\": 5,\n        \"c_flank_length\": 5,\n        \"convolutional_kernel_size\": 3,\n    },\n    {\n        \"max_epochs\": 1,\n        \"n_flank_length\": 5,\n        \"c_flank_length\": 5,\n        \"convolutional_kernel_size\": 3,\n    }\n]\n\n\ndef make_dataset(num=10000):\n    df = pandas.DataFrame({\n        \"n_flank\": random_peptides(num / 2, 10) + random_peptides(num / 2, 1),\n        \"c_flank\": random_peptides(num, 10),\n        \"peptide\": random_peptides(num / 2, 11) + random_peptides(num / 2, 8),\n    }).sample(frac=1.0)\n    df[\"sample_id\"] = pandas.Series(\n        [\"sample_%d\" % (i + 1) for i in range(5)]).sample(\n        n=len(df), replace=True).values\n\n    n_regex = \"[AILQSVWEN].[MNPQYKV]\"\n\n    def is_hit(n_flank, c_flank, peptide):\n        if re.search(n_regex, peptide):\n            return False  # peptide is cleaved\n        return bool(re.match(n_regex, n_flank[-1:] + peptide))\n\n    df[\"hit\"] = [\n        is_hit(row.n_flank, row.c_flank, row.peptide)\n        for (_, row) in df.iterrows()\n    ]\n\n    train_df = df.sample(frac=0.9)\n    test_df = df.loc[~df.index.isin(train_df.index)].copy()\n\n    print(\n        \"Generated dataset\",\n        len(df),\n        \"hits: \",\n        df.hit.sum(),\n        \"frac:\",\n        df.hit.mean())\n\n    return (train_df, test_df)\n\n\ndef run_and_check(n_jobs=0, additional_args=[], delete=False):\n    (train_df, test_df) = make_dataset()\n\n    models_dir = tempfile.mkdtemp(prefix=\"mhcflurry-test-models\")\n    hyperparameters_filename = os.path.join(\n        models_dir, \"hyperparameters.yaml\")\n    with open(hyperparameters_filename, \"w\") as fd:\n        json.dump(HYPERPARAMETERS, fd)\n\n    train_filename = os.path.join(models_dir, \"training.csv\")\n    train_df.to_csv(train_filename, index=False)\n\n    args = mhcflurry_cli(\"mhcflurry-class1-train-processing-models\") + [\n        \"--data\", train_filename,\n        \"--hyperparameters\", hyperparameters_filename,\n        \"--out-models-dir\", models_dir,\n        \"--held-out-samples\", \"2\",\n        \"--num-folds\", \"2\",\n        \"--num-jobs\", str(n_jobs),\n    ]\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    full_predictor = Class1ProcessingPredictor.load(models_dir)\n    print(\"Loaded models\", len(full_predictor.models))\n    assert len(full_predictor.models) == 4\n\n    test_df[\"full_predictor\"] = full_predictor.predict(\n        test_df.peptide.values,\n        test_df.n_flank.values,\n        test_df.c_flank.values)\n\n    test_auc = roc_auc_score(test_df.hit.values, test_df.full_predictor.values)\n    print(\"Full predictor auc\", test_auc)\n\n    print(\"Performing model selection.\")\n\n    # Run model selection\n    models_dir_selected = tempfile.mkdtemp(\n        
prefix=\"mhcflurry-test-models-selected\")\n    args = mhcflurry_cli(\"mhcflurry-class1-select-processing-models\") + [\n        \"--data\", os.path.join(models_dir, \"train_data.csv.bz2\"),\n        \"--models-dir\", models_dir,\n        \"--out-models-dir\", models_dir_selected,\n        \"--max-models\", \"1\",\n        \"--num-jobs\", str(n_jobs),\n    ] + additional_args\n    print(\"Running with args: %s\" % args)\n    subprocess.check_call(args)\n\n    selected_predictor = Class1ProcessingPredictor.load(models_dir_selected)\n    assert len(selected_predictor.models) == 2\n\n    test_df[\"selected_predictor\"] = selected_predictor.predict(\n        test_df.peptide.values,\n        test_df.n_flank.values,\n        test_df.c_flank.values)\n\n    test_auc = roc_auc_score(test_df.hit.values, test_df.selected_predictor.values)\n    print(\"Selected predictor auc\", test_auc)\n\n    if delete:\n        print(\"Deleting: %s\" % models_dir)\n        shutil.rmtree(models_dir)\n        shutil.rmtree(models_dir_selected)\n\ndef Xtest_run_parallel():\n    run_and_check(n_jobs=2)\n\n\ndef test_run_serial():\n    run_and_check(n_jobs=0)\n"
  },
  {
    "path": "test/test_training_variants.py",
    "content": "\"\"\"\nTraining variant tests for PyTorch migration.\n\nTests training with different hyperparameter combinations that are valid\nbut not exercised by the existing test suite, plus a functional test that\ntrains a single network on synthetic A*02:01-motif data and verifies that\na known epitope is predicted as a strong binder.\n\"\"\"\nimport random\n\nimport numpy as np\nimport pytest\nimport torch\n\nfrom mhcflurry.class1_neural_network import Class1NeuralNetwork\nfrom mhcflurry.common import random_peptides\nfrom mhcflurry.testing_utils import startup, cleanup\n\n\n@pytest.fixture(autouse=True)\ndef setup_teardown():\n    startup()\n    yield\n    cleanup()\n\n\ndef _seed(s=42):\n    np.random.seed(s)\n    random.seed(s)\n    torch.manual_seed(s)\n\n\ndef _make_model(**overrides):\n    defaults = dict(\n        activation=\"tanh\",\n        layer_sizes=[16],\n        locally_connected_layers=[],\n        peptide_dense_layer_sizes=[],\n        allele_dense_layer_sizes=[],\n        dropout_probability=0.0,\n        batch_normalization=False,\n        dense_layer_l1_regularization=0.0,\n        dense_layer_l2_regularization=0.0,\n        max_epochs=30,\n        early_stopping=False,\n        validation_split=0.0,\n        minibatch_size=32,\n        random_negative_rate=0.0,\n        random_negative_constant=0,\n    )\n    defaults.update(overrides)\n    return Class1NeuralNetwork(**defaults)\n\n\n# ---------------------------------------------------------------------------\n# Training with locally connected layers\n# ---------------------------------------------------------------------------\n\ndef test_train_with_locally_connected():\n    _seed(1)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        locally_connected_layers=[\n            {\"filters\": 4, \"activation\": \"tanh\", \"kernel_size\": 3},\n        ],\n        layer_sizes=[8],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n    assert preds.min() > 0\n\n\n# ---------------------------------------------------------------------------\n# Training with dropout\n# ---------------------------------------------------------------------------\n\ndef test_train_with_dropout():\n    _seed(2)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        dropout_probability=0.5,  # keep probability\n        layer_sizes=[16, 8],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with batch normalization\n# ---------------------------------------------------------------------------\n\ndef test_train_with_batch_normalization():\n    _seed(3)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        batch_normalization=True,\n        activation=\"relu\",\n        layer_sizes=[16],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with combined options: LC + dropout + batch norm\n# 
---------------------------------------------------------------------------\n\ndef test_train_lc_dropout_batchnorm():\n    _seed(4)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        locally_connected_layers=[\n            {\"filters\": 4, \"activation\": \"tanh\", \"kernel_size\": 3},\n        ],\n        dropout_probability=0.8,\n        batch_normalization=True,\n        layer_sizes=[16, 8],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with skip-connections (DenseNet) topology\n# ---------------------------------------------------------------------------\n\ndef test_train_with_skip_connections():\n    _seed(5)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        topology=\"with-skip-connections\",\n        layer_sizes=[8, 8, 4],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with peptide dense layers\n# ---------------------------------------------------------------------------\n\ndef test_train_with_peptide_dense_layers():\n    _seed(6)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        peptide_dense_layer_sizes=[16],\n        layer_sizes=[8],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with different optimizers\n# ---------------------------------------------------------------------------\n\n@pytest.mark.parametrize(\"optimizer\", [\"adam\", \"sgd\", \"rmsprop\"])\ndef test_train_with_optimizer(optimizer):\n    _seed(7)\n    peptides = random_peptides(60, length=9)\n    affinities = np.random.uniform(10, 50000, 60)\n\n    model = _make_model(\n        optimizer=optimizer,\n        learning_rate=0.01,\n        layer_sizes=[8],\n        max_epochs=5,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 60\n\n\n# ---------------------------------------------------------------------------\n# Training with L2 regularization\n# ---------------------------------------------------------------------------\n\ndef test_train_with_l2_regularization():\n    _seed(8)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        dense_layer_l2_regularization=0.01,\n        layer_sizes=[8],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with L1 + L2 regularization combined\n# ---------------------------------------------------------------------------\n\ndef test_train_with_l1_l2_regularization():\n    _seed(9)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        
dense_layer_l1_regularization=0.01,\n        dense_layer_l2_regularization=0.01,\n        layer_sizes=[8],\n        max_epochs=10,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with random negatives\n# ---------------------------------------------------------------------------\n\ndef test_train_with_random_negatives():\n    _seed(10)\n    peptides = random_peptides(80, length=9)\n    affinities = np.random.uniform(10, 50000, 80)\n\n    model = _make_model(\n        random_negative_rate=1.0,\n        random_negative_constant=10,\n        layer_sizes=[8],\n        max_epochs=5,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 80\n\n\n# ---------------------------------------------------------------------------\n# Training with validation split + early stopping\n# ---------------------------------------------------------------------------\n\ndef test_train_with_early_stopping():\n    _seed(11)\n    peptides = random_peptides(100, length=9)\n    affinities = np.random.uniform(10, 50000, 100)\n\n    model = _make_model(\n        validation_split=0.2,\n        early_stopping=True,\n        patience=3,\n        max_epochs=200,\n        layer_sizes=[8],\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 100\n    # Should have stopped early (well before 200)\n    n_epochs = len(model.fit_info[-1][\"loss\"])\n    assert n_epochs < 200\n\n\n# ---------------------------------------------------------------------------\n# Serialization round-trip preserves predictions after combined-option training\n# ---------------------------------------------------------------------------\n\ndef test_serialization_with_lc_dropout_batchnorm():\n    _seed(12)\n    peptides = random_peptides(60, length=9)\n    affinities = np.random.uniform(10, 50000, 60)\n\n    model = _make_model(\n        locally_connected_layers=[\n            {\"filters\": 4, \"activation\": \"tanh\", \"kernel_size\": 3},\n        ],\n        dropout_probability=0.8,\n        batch_normalization=True,\n        layer_sizes=[8],\n        max_epochs=5,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds_before = model.predict(peptides)\n\n    config = model.get_config()\n    weights = model.get_weights()\n    restored = Class1NeuralNetwork.from_config(config, weights=weights)\n    preds_after = restored.predict(peptides)\n\n    np.testing.assert_allclose(preds_before, preds_after, rtol=1e-5)\n\n\n# ---------------------------------------------------------------------------\n# Training with mixed-length peptides\n# ---------------------------------------------------------------------------\n\ndef test_train_mixed_lengths_with_lc():\n    _seed(13)\n    peptides = (\n        random_peptides(30, length=8) +\n        random_peptides(30, length=9) +\n        random_peptides(20, length=10) +\n        random_peptides(10, length=11)\n    )\n    affinities = np.random.uniform(10, 50000, 90)\n\n    model = _make_model(\n        locally_connected_layers=[\n            {\"filters\": 4, \"activation\": \"tanh\", \"kernel_size\": 3},\n        ],\n        layer_sizes=[8],\n        max_epochs=5,\n    )\n    model.fit(peptides, affinities, verbose=0)\n    preds = model.predict(peptides)\n    assert len(preds) == 90\n\n\n# 
---------------------------------------------------------------------------\n# Functional test: learn A*02:01 motif from synthetic data\n# ---------------------------------------------------------------------------\n\n_A0201_P2 = list(\"LM\")        # anchor at position 2\n_A0201_P9 = list(\"LVI\")       # anchor at C-terminal position\n_OTHER_AA = list(\"ACDEFGHIKNPQRSTVWY\")  # non-anchor residues\n\n\ndef _random_aa(choices, rng):\n    return choices[rng.randint(0, len(choices) - 1)]\n\n\ndef _generate_a0201_binder(rng, length=9):\n    \"\"\"Generate a peptide with canonical A*02:01 P2+P9 motifs.\"\"\"\n    pep = [_random_aa(_OTHER_AA, rng) for _ in range(length)]\n    pep[1] = _random_aa(_A0201_P2, rng)          # P2 anchor\n    pep[length - 1] = _random_aa(_A0201_P9, rng)  # Pend anchor\n    return \"\".join(pep)\n\n\ndef _generate_non_binder(rng, length=9):\n    \"\"\"Generate a peptide that avoids A*02:01 anchors at P2 and Pend.\"\"\"\n    non_p2 = [aa for aa in _OTHER_AA if aa not in _A0201_P2]\n    non_p9 = [aa for aa in _OTHER_AA if aa not in _A0201_P9]\n    pep = [_random_aa(_OTHER_AA, rng) for _ in range(length)]\n    pep[1] = _random_aa(non_p2, rng)\n    pep[length - 1] = _random_aa(non_p9, rng)\n    return \"\".join(pep)\n\n\ndef test_learn_a0201_motif():\n    \"\"\"\n    Train a single Class1NeuralNetwork on 200 synthetic peptides\n    (100 binders with A*02:01-like P2/Pend motifs at 1 nM,\n     100 non-binders at 50000 nM) and verify that SLLQHLIGL\n    (a canonical A*02:01 epitope with P2=L, P9=L) is predicted\n    as a strong binder (<= 500 nM).\n    \"\"\"\n    rng = np.random.RandomState(314)\n\n    binders = [_generate_a0201_binder(rng) for _ in range(100)]\n    non_binders = [_generate_non_binder(rng) for _ in range(100)]\n\n    peptides = binders + non_binders\n    affinities = np.concatenate([\n        np.full(100, 1.0),      # strong binders\n        np.full(100, 50000.0),  # non-binders\n    ])\n\n    _seed(271)\n    model = _make_model(\n        locally_connected_layers=[\n            {\"filters\": 8, \"activation\": \"tanh\", \"kernel_size\": 3},\n        ],\n        layer_sizes=[32],\n        max_epochs=200,\n        early_stopping=False,\n        validation_split=0.0,\n        learning_rate=0.001,\n        optimizer=\"adam\",\n        minibatch_size=32,\n        dense_layer_l1_regularization=0.0,\n    )\n    model.fit(peptides, affinities, verbose=0)\n\n    # SLLQHLIGL — HLA-A*02:01 Tax epitope\n    # P2 = L (canonical A*02:01 anchor), P9 = L (canonical A*02:01 anchor)\n    test_pred = model.predict([\"SLLQHLIGL\"])[0]\n    print(f\"SLLQHLIGL predicted affinity: {test_pred:.1f} nM\")\n    assert test_pred <= 500, (\n        f\"SLLQHLIGL should be predicted as strong binder, got {test_pred:.1f} nM\"\n    )\n\n    # A peptide with wrong anchors should be predicted as weak binder\n    weak_pred = model.predict([\"SAAQHQIGA\"])[0]\n    print(f\"SAAQHQIGA predicted affinity: {weak_pred:.1f} nM\")\n    assert weak_pred > 1000, (\n        f\"Non-motif peptide should be predicted weak, got {weak_pred:.1f} nM\"\n    )\n"
  },
  {
    "path": "test-environment.yml",
    "content": "name: test-environment\nchannels:\n  - conda-forge\n  - defaults\ndependencies:\n  - pip\n"
  }
]