Full Code of tiiuae/sbomnix for AI

main 6fb9c6707ff3 cached
186 files
903.4 KB
225.3k tokens
841 symbols
1 requests
Download .txt
Showing preview only (957K chars total). Download the full file or copy to clipboard to get everything.
Repository: tiiuae/sbomnix
Branch: main
Commit: 6fb9c6707ff3
Files: 186
Total size: 903.4 KB

Directory structure:
gitextract_rba_m7yr/

├── .envrc
├── .github/
│   ├── dependabot.yml
│   └── workflows/
│       ├── codeql.yml
│       ├── dependency-review.yml
│       ├── release_sbomnix.yml
│       ├── scorecards.yml
│       └── test_sbomnix.yml
├── .gitignore
├── .gitlint
├── LICENSES/
│   ├── Apache-2.0.txt
│   ├── BSD-3-Clause.txt
│   ├── CC-BY-3.0.txt
│   ├── CC-BY-SA-4.0.txt
│   └── MIT.txt
├── README.md
├── REUSE.toml
├── VERSION
├── default.nix
├── doc/
│   ├── nix_outdated.md
│   ├── nixgraph.md
│   ├── nixmeta.md
│   ├── provenance.md
│   ├── repology_cli.md
│   └── vulnxscan.md
├── flake.nix
├── nix/
│   ├── apps.nix
│   ├── default.nix
│   ├── formatter.nix
│   ├── git-hooks.nix
│   └── packages.nix
├── pyproject.toml
├── pyrightconfig.json
├── pytest.ini
├── scripts/
│   ├── check-fast.sh
│   ├── check-full.sh
│   ├── release-asset.sh
│   └── run-pytest-lane.sh
├── shell.nix
├── src/
│   ├── common/
│   │   ├── __init__.py
│   │   ├── cli_args.py
│   │   ├── columns.py
│   │   ├── df.py
│   │   ├── errors.py
│   │   ├── flakeref.py
│   │   ├── http.py
│   │   ├── log.py
│   │   ├── nix_utils.py
│   │   ├── package_names.py
│   │   ├── pkgmeta.py
│   │   ├── proc.py
│   │   ├── regex.py
│   │   ├── spdx.py
│   │   └── versioning.py
│   ├── nixgraph/
│   │   ├── __init__.py
│   │   ├── graph.py
│   │   ├── main.py
│   │   └── render.py
│   ├── nixmeta/
│   │   ├── __init__.py
│   │   ├── flake_metadata.py
│   │   ├── main.py
│   │   ├── metadata_json.py
│   │   └── scanner.py
│   ├── nixupdate/
│   │   ├── __init__.py
│   │   ├── nix_outdated.py
│   │   ├── nix_visualize.py
│   │   ├── pipeline.py
│   │   └── report.py
│   ├── provenance/
│   │   ├── __init__.py
│   │   ├── dependencies.py
│   │   ├── digests.py
│   │   ├── main.py
│   │   ├── nix_commands.py
│   │   ├── path_info.py
│   │   ├── schema.py
│   │   └── subjects.py
│   ├── repology/
│   │   ├── __init__.py
│   │   ├── adapter.py
│   │   ├── cves.py
│   │   ├── exceptions.py
│   │   ├── projects_parser.py
│   │   ├── repology_cli.py
│   │   ├── repology_cve.py
│   │   ├── reporting.py
│   │   ├── sbom.py
│   │   └── session.py
│   ├── sbomnix/
│   │   ├── __init__.py
│   │   ├── builder.py
│   │   ├── cdx.py
│   │   ├── cli_utils.py
│   │   ├── closure.py
│   │   ├── components.py
│   │   ├── cpe.py
│   │   ├── dependency_index.py
│   │   ├── derivation.py
│   │   ├── derivers.py
│   │   ├── dfcache.py
│   │   ├── exporters.py
│   │   ├── main.py
│   │   ├── meta.py
│   │   ├── meta_source.py
│   │   ├── runtime.py
│   │   └── vuln_enrichment.py
│   └── vulnxscan/
│       ├── __init__.py
│       ├── github_prs.py
│       ├── osv.py
│       ├── osv_client.py
│       ├── parsers.py
│       ├── repology_lookup.py
│       ├── reporting.py
│       ├── scanners.py
│       ├── triage.py
│       ├── utils.py
│       ├── vulnscan.py
│       ├── vulnxscan_cli.py
│       └── whitelist.py
└── tests/
    ├── __init__.py
    ├── compare_deps.py
    ├── compare_sboms.py
    ├── conftest.py
    ├── integration/
    │   ├── __init__.py
    │   ├── test_nixgraph_cli.py
    │   ├── test_nixmeta_cli.py
    │   ├── test_nixupdate_cli.py
    │   ├── test_provenance_cli.py
    │   ├── test_repology_cli.py
    │   ├── test_sbomnix_cli.py
    │   └── test_vulnxscan_cli.py
    ├── resources/
    │   ├── README.md
    │   ├── cdx_bom-1.3.schema.json
    │   ├── cdx_bom-1.4.schema.json
    │   ├── grype-test-db.tar.gz.license
    │   ├── jsf-0.82.schema.json
    │   ├── make_grype_test_db.py
    │   ├── nixmeta-package-set.nix
    │   ├── provenance-1.0.schema.json
    │   ├── repology/
    │   │   ├── cves_openssl.html
    │   │   ├── projects_empty.html
    │   │   └── projects_hello.html
    │   ├── sample_cdx_sbom.json
    │   ├── spdx.schema.json
    │   ├── spdx_bom-2.3.schema.json
    │   └── test-derivation-chain.nix
    ├── test_builder_runtime.py
    ├── test_buildtime_closure.py
    ├── test_cli_conventions.py
    ├── test_cli_error_boundaries.py
    ├── test_cli_smoke.py
    ├── test_common_log.py
    ├── test_common_versioning.py
    ├── test_compare_deps.py
    ├── test_components.py
    ├── test_cpe.py
    ├── test_dependency_index.py
    ├── test_derivation_hardening.py
    ├── test_flakeref_resolution.py
    ├── test_library_exceptions.py
    ├── test_nix_cli_argv.py
    ├── test_nix_outdated_pipeline.py
    ├── test_nix_target_resolution.py
    ├── test_nix_utils_parsing.py
    ├── test_nixgraph_graph.py
    ├── test_nixmeta_parsing.py
    ├── test_nixmeta_progress.py
    ├── test_nixmeta_source.py
    ├── test_nixmeta_source_export.py
    ├── test_osv_client.py
    ├── test_provenance_batching.py
    ├── test_provenance_path_info.py
    ├── test_provenance_subjects.py
    ├── test_repology_adapter.py
    ├── test_repology_cve.py
    ├── test_repology_projects_parser.py
    ├── test_repology_sbom.py
    ├── test_runtime_closure.py
    ├── test_sbom_closure.py
    ├── test_sbom_vuln_enrichment.py
    ├── test_schema_validation.py
    ├── test_store_batching.py
    ├── test_temp_sbom_generation.py
    ├── test_vulnix_test_support.py
    ├── test_vulnxscan_engine.py
    ├── test_vulnxscan_triage.py
    ├── test_whitelist.py
    ├── testpaths.py
    ├── testutils.py
    └── vulnix_test_support.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .envrc
================================================
#! /usr/bin/env bash
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

# direnv entry point: prefer the flake-based environment; when `nix flake show`
# fails (e.g. flakes disabled or unavailable), fall back to plain shell.nix.
if ! nix flake show &> /dev/null; then
  use nix
else
  use flake
fi


================================================
FILE: .github/dependabot.yml
================================================
# Dependabot configuration (schema version 2).
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates
version: 2
updates:
  # Check GitHub Actions used by workflows in this repository (directory "/")
  # for new versions once a day.
  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: daily


================================================
FILE: .github/workflows/codeql.yml
================================================
# CodeQL static-analysis workflow (based on the GitHub-provided template).
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: ["main"]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: ["main"]
  # Weekly scheduled scan (Mondays 00:00 UTC) to pick up newly added queries.
  schedule:
    - cron: "0 0 * * 1"

# Least-privilege default for the workflow; the job below elevates as needed.
permissions:
  contents: read

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      # Required to upload CodeQL results to the code-scanning dashboard.
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: ["python"]
        # Supported CodeQL languages include: c-cpp, csharp, go, java-kotlin,
        # javascript-typescript, python, ruby, swift.
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@8d3c67de8e2fe68ef647c8db1e6a09f647780f40 # v2.19.0
        with:
          egress-policy: audit

      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below).
      - name: Autobuild
        uses: github/codeql-action/autobuild@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

      # If the Autobuild step above fails, remove it and uncomment the following
      # three lines, modifying them (or adding more) to build your code if your
      # project uses a compiled language.

      # - run: |
      #   echo "Run, Build Application using script"
      #   ./location_of_script_within_repo/buildscript.sh

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
        with:
          category: "/language:${{matrix.language}}"


================================================
FILE: .github/workflows/dependency-review.yml
================================================
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request,
# surfacing known-vulnerable versions of the packages declared or updated in the PR.
# Once installed, if the workflow run is marked as required,
# PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
name: 'Dependency Review'
on: [pull_request]

# Least-privilege: the job only needs to read repository contents.
permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@8d3c67de8e2fe68ef647c8db1e6a09f647780f40 # v2.19.0
        with:
          egress-policy: audit

      - name: 'Checkout Repository'
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      # Compares dependency manifests between the PR base and head revisions.
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0


================================================
FILE: .github/workflows/release_sbomnix.yml
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

# Builds the release asset with Nix and attaches it to the GitHub release
# corresponding to the pushed version tag.
name: Upload Release Asset

on:
  push:
    # Run on push events where tags match v*, e.g. v1.3.0
    tags:
      - 'v*'

permissions:
  contents: read

jobs:
  build:
    name: Upload Release Asset
    runs-on: ubuntu-latest
    permissions:
      # Needed to attach the asset to the release.
      contents: write
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@8d3c67de8e2fe68ef647c8db1e6a09f647780f40 # v2.19.0
        with:
          egress-policy: audit

      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - uses: cachix/install-nix-action@ab739621df7a23f52766f9ccc97f38da6b7af14f # v31.10.5
        with:
          nix_path: nixpkgs=channel:nixpkgs-unstable
      # NOTE(review): the upload step below globs build/sbom*, so this script
      # presumably writes its output under build/ — verify in scripts/release-asset.sh.
      - name: Build release asset
        run: ./scripts/release-asset.sh
      - name: Upload release asset
        uses: svenstaro/upload-release-action@29e53e917877a24fad85510ded594ab3c9ca12de # v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          # file_glob below makes this a pattern matching every build/sbom* file.
          file: build/sbom*
          tag: ${{ github.ref }}
          overwrite: true
          file_glob: true


================================================
FILE: .github/workflows/scorecards.yml
================================================
# OpenSSF Scorecard supply-chain security analysis.
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.

name: Scorecard supply-chain security
on:
  # For Branch-Protection check. Only the default branch is supported. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
  branch_protection_rule:
  # To guarantee Maintained check is occasionally updated. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
  schedule:
    - cron: '20 7 * * 2'
  push:
    branches: ["main"]

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecard analysis
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload the results to code-scanning dashboard.
      security-events: write
      # Needed to publish results and get a badge (see publish_results below).
      id-token: write
      contents: read
      actions: read
      # To allow GraphQL ListCommits to work
      issues: read
      pull-requests: read
      # To detect SAST tools
      checks: read

    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@8d3c67de8e2fe68ef647c8db1e6a09f647780f40 # v2.19.0
        with:
          egress-policy: audit

      - name: "Checkout code"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          # Scorecard does not need repo credentials; avoid persisting the token.
          persist-credentials: false

      - name: "Run analysis"
        uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3
        with:
          results_file: results.sarif
          results_format: sarif
          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
          # - you want to enable the Branch-Protection check on a *public* repository, or
          # - you are installing Scorecards on a *private* repository
          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
          # repo_token: ${{ secrets.SCORECARD_TOKEN }}

          # Public repositories:
          #   - Publish results to OpenSSF REST API for easy access by consumers
          #   - Allows the repository to include the Scorecard badge.
          #   - See https://github.com/ossf/scorecard-action#publishing-results.
          # For private repositories:
          #   - `publish_results` will always be set to `false`, regardless
          #     of the value entered here.
          publish_results: true

      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 5

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
        with:
          sarif_file: results.sarif


================================================
FILE: .github/workflows/test_sbomnix.yml
================================================
# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

# Main CI lane: runs the full check script on Linux and macOS.
name: sbomnix checks

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

# Cancel superseded in-flight runs for the same PR (or ref) when new
# commits arrive.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  full:
    name: full lane (${{ matrix.os }})
    strategy:
      # Let the other OS lane finish even if one fails.
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@8d3c67de8e2fe68ef647c8db1e6a09f647780f40 # v2.19.0
        with:
          egress-policy: audit

      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - uses: cachix/install-nix-action@ab739621df7a23f52766f9ccc97f38da6b7af14f # v31.10.5
        with:
          nix_path: nixpkgs=channel:nixpkgs-unstable
      - name: Print nix version
        run: nix --version
      - name: Run full checks
        run: ./scripts/check-full.sh


================================================
FILE: .gitignore
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

# Build outputs and Python environment/packaging artifacts
temp/
venv/
build/
*.egg-info/
.eggs/
__pycache__/
# Test and coverage artifacts
.coverage
.coverage.*
.hypothesis/
coverage.xml
htmlcov/
sbomnix_test_data/
# Nix build output symlink
result
*.py[cod]
*.sqlite
*.csv
# Generated output in the repository root only (leading "/")
/*.log
/*.json
# Keep pyrightconfig.json tracked despite the /*.json rule above
!/pyrightconfig.json
/*.png
/*.jpg
/*.pdf
/*.dot
/*.svg
# Editor/tool-local state
.idea
.direnv
.pre-commit-config.yaml


================================================
FILE: .gitlint
================================================
# SPDX-FileCopyrightText: 2025 TII (SSRC) and the Ghaf contributors
# SPDX-License-Identifier: Apache-2.0

# Commit-message lint configuration for gitlint.
[general]
# Ignore rules, reference them by id or name (comma-separated).
# Commit bodies are optional here, hence body-is-missing is disabled.
# https://jorisroovers.com/gitlint/latest/rules/builtin_rules/
ignore=body-is-missing
# Enable specific community contributed rules: every commit body must
# carry a Signed-off-by trailer.
# https://jorisroovers.com/gitlint/latest/rules/contrib_rules/#available-contrib-rules
contrib=contrib-body-requires-signed-off-by


================================================
FILE: LICENSES/Apache-2.0.txt
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

     (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and

     (b) You must cause any modified files to carry prominent notices stating that You changed the files; and

     (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and

     (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

     You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!)  The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


================================================
FILE: LICENSES/BSD-3-Clause.txt
================================================
Copyright (c) <year> <owner>. 

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


================================================
FILE: LICENSES/CC-BY-3.0.txt
================================================
Creative Commons Attribution 3.0 Unported

 CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS LICENSE DOES NOT CREATE AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE INFORMATION PROVIDED, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM ITS USE.

License

THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED.

BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT THIS LICENSE MAY BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS.

1. Definitions

     a. "Adaptation" means a work based upon the Work, or upon the Work and other pre-existing works, such as a translation, adaptation, derivative work, arrangement of music or other alterations of a literary or artistic work, or phonogram or performance and includes cinematographic adaptations or any other form in which the Work may be recast, transformed, or adapted including in any form recognizably derived from the original, except that a work that constitutes a Collection will not be considered an Adaptation for the purpose of this License. For the avoidance of doubt, where the Work is a musical work, performance or phonogram, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered an Adaptation for the purpose of this License.

     b. "Collection" means a collection of literary or artistic works, such as encyclopedias and anthologies, or performances, phonograms or broadcasts, or other works or subject matter other than works listed in Section 1(f) below, which, by reason of the selection and arrangement of their contents, constitute intellectual creations, in which the Work is included in its entirety in unmodified form along with one or more other contributions, each constituting separate and independent works in themselves, which together are assembled into a collective whole. A work that constitutes a Collection will not be considered an Adaptation (as defined above) for the purposes of this License.

     c. "Distribute" means to make available to the public the original and copies of the Work or Adaptation, as appropriate, through sale or other transfer of ownership.

     d. "Licensor" means the individual, individuals, entity or entities that offer(s) the Work under the terms of this License.

     e. "Original Author" means, in the case of a literary or artistic work, the individual, individuals, entity or entities who created the Work or if no individual or entity can be identified, the publisher; and in addition (i) in the case of a performance the actors, singers, musicians, dancers, and other persons who act, sing, deliver, declaim, play in, interpret or otherwise perform literary or artistic works or expressions of folklore; (ii) in the case of a phonogram the producer being the person or legal entity who first fixes the sounds of a performance or other sounds; and, (iii) in the case of broadcasts, the organization that transmits the broadcast.

     f. "Work" means the literary and/or artistic work offered under the terms of this License including without limitation any production in the literary, scientific and artistic domain, whatever may be the mode or form of its expression including digital form, such as a book, pamphlet and other writing; a lecture, address, sermon or other work of the same nature; a dramatic or dramatico-musical work; a choreographic work or entertainment in dumb show; a musical composition with or without words; a cinematographic work to which are assimilated works expressed by a process analogous to cinematography; a work of drawing, painting, architecture, sculpture, engraving or lithography; a photographic work to which are assimilated works expressed by a process analogous to photography; a work of applied art; an illustration, map, plan, sketch or three-dimensional work relative to geography, topography, architecture or science; a performance; a broadcast; a phonogram; a compilation of data to the extent it is protected as a copyrightable work; or a work performed by a variety or circus performer to the extent it is not otherwise considered a literary or artistic work.

     g. "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation.

     h. "Publicly Perform" means to perform public recitations of the Work and to communicate to the public those public recitations, by any means or process, including by wire or wireless means or public digital performances; to make available to the public Works in such a way that members of the public may access these Works from a place and at a place individually chosen by them; to perform the Work to the public by any means or process and the communication to the public of the performances of the Work, including by public digital performance; to broadcast and rebroadcast the Work by any means including signs, sounds or images.

     i. "Reproduce" means to make copies of the Work by any means including without limitation by sound or visual recordings and the right of fixation and reproducing fixations of the Work, including storage of a protected performance or phonogram in digital form or other electronic medium.

2. Fair Dealing Rights. Nothing in this License is intended to reduce, limit, or restrict any uses free from copyright or rights arising from limitations or exceptions that are provided for in connection with the copyright protection under copyright law or other applicable laws.

3. License Grant. Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below:

     a. to Reproduce the Work, to incorporate the Work into one or more Collections, and to Reproduce the Work as incorporated in the Collections;

     b. to create and Reproduce Adaptations provided that any such Adaptation, including any translation in any medium, takes reasonable steps to clearly label, demarcate or otherwise identify that changes were made to the original Work. For example, a translation could be marked "The original work was translated from English to Spanish," or a modification could indicate "The original work has been modified.";

     c. to Distribute and Publicly Perform the Work including as incorporated in Collections; and,

     d. to Distribute and Publicly Perform Adaptations.

     e. For the avoidance of doubt:

          i. Non-waivable Compulsory License Schemes. In those jurisdictions in which the right to collect royalties through any statutory or compulsory licensing scheme cannot be waived, the Licensor reserves the exclusive right to collect such royalties for any exercise by You of the rights granted under this License;

          ii. Waivable Compulsory License Schemes. In those jurisdictions in which the right to collect royalties through any statutory or compulsory licensing scheme can be waived, the Licensor waives the exclusive right to collect such royalties for any exercise by You of the rights granted under this License; and,

          iii. Voluntary License Schemes. The Licensor waives the right to collect royalties, whether individually or, in the event that the Licensor is a member of a collecting society that administers voluntary licensing schemes, via that society, from any exercise by You of the rights granted under this License.

The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. Subject to Section 8(f), all rights not expressly granted by Licensor are hereby reserved.

4. Restrictions. The license granted in Section 3 above is expressly made subject to and limited by the following restrictions:

     a. You may Distribute or Publicly Perform the Work only under the terms of this License. You must include a copy of, or the Uniform Resource Identifier (URI) for, this License with every copy of the Work You Distribute or Publicly Perform. You may not offer or impose any terms on the Work that restrict the terms of this License or the ability of the recipient of the Work to exercise the rights granted to that recipient under the terms of the License. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties with every copy of the Work You Distribute or Publicly Perform. When You Distribute or Publicly Perform the Work, You may not impose any effective technological measures on the Work that restrict the ability of a recipient of the Work from You to exercise the rights granted to that recipient under the terms of the License. This Section 4(a) applies to the Work as incorporated in a Collection, but this does not require the Collection apart from the Work itself to be made subject to the terms of this License. If You create a Collection, upon notice from any Licensor You must, to the extent practicable, remove from the Collection any credit as required by Section 4(b), as requested. If You create an Adaptation, upon notice from any Licensor You must, to the extent practicable, remove from the Adaptation any credit as required by Section 4(b), as requested.

     b. If You Distribute, or Publicly Perform the Work or any Adaptations or Collections, You must, unless a request has been made pursuant to Section 4(a), keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or if the Original Author and/or Licensor designate another party or parties (e.g., a sponsor institute, publishing entity, journal) for attribution ("Attribution Parties") in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; (ii) the title of the Work if supplied; (iii) to the extent reasonably practicable, the URI, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and (iv) , consistent with Section 3(b), in the case of an Adaptation, a credit identifying the use of the Work in the Adaptation (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). The credit required by this Section 4 (b) may be implemented in any reasonable manner; provided, however, that in the case of a Adaptation or Collection, at a minimum such credit will appear, if a credit for all contributing authors of the Adaptation or Collection appears, then as part of these credits and in a manner at least as prominent as the credits for the other contributing authors. 
For the avoidance of doubt, You may only use the credit required by this Section for the purpose of attribution in the manner set out above and, by exercising Your rights under this License, You may not implicitly or explicitly assert or imply any connection with, sponsorship or endorsement by the Original Author, Licensor and/or Attribution Parties, as appropriate, of You or Your use of the Work, without the separate, express prior written permission of the Original Author, Licensor and/or Attribution Parties.

     c. Except as otherwise agreed in writing by the Licensor or as may be otherwise permitted by applicable law, if You Reproduce, Distribute or Publicly Perform the Work either by itself or as part of any Adaptations or Collections, You must not distort, mutilate, modify or take other derogatory action in relation to the Work which would be prejudicial to the Original Author's honor or reputation. Licensor agrees that in those jurisdictions (e.g. Japan), in which any exercise of the right granted in Section 3(b) of this License (the right to make Adaptations) would be deemed to be a distortion, mutilation, modification or other derogatory action prejudicial to the Original Author's honor and reputation, the Licensor will waive or not assert, as appropriate, this Section, to the fullest extent permitted by the applicable national law, to enable You to reasonably exercise Your right under Section 3(b) of this License (right to make Adaptations) but not otherwise.

5. Representations, Warranties and Disclaimer

UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU.

6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

7. Termination

     a. This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Adaptations or Collections from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License.

     b. Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above.

8. Miscellaneous

     a. Each time You Distribute or Publicly Perform the Work or a Collection, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License.

     b. Each time You Distribute or Publicly Perform an Adaptation, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License.

     c. If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.

     d. No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You.

     e. This License may not be modified without the mutual written agreement of the Licensor and You.

     f. The rights granted under, and the subject matter referenced, in this License were drafted utilizing the terminology of the Berne Convention for the Protection of Literary and Artistic Works (as amended on September 28, 1979), the Rome Convention of 1961, the WIPO Copyright Treaty of 1996, the WIPO Performances and Phonograms Treaty of 1996 and the Universal Copyright Convention (as revised on July 24, 1971). These rights and subject matter take effect in the relevant jurisdiction in which the License terms are sought to be enforced according to the corresponding provisions of the implementation of those treaty provisions in the applicable national law. If the standard suite of rights granted under applicable copyright law includes additional rights not granted under this License, such additional rights are deemed to be included in the License; this License is not intended to restrict the license of any rights under applicable law.

Creative Commons Notice

Creative Commons is not a party to this License, and makes no warranty whatsoever in connection with the Work. Creative Commons will not be liable to You or any party on any legal theory for any damages whatsoever, including without limitation any general, special, incidental or consequential damages arising in connection to this license. Notwithstanding the foregoing two (2) sentences, if Creative Commons has expressly identified itself as the Licensor hereunder, it shall have all rights and obligations of Licensor.

Except for the limited purpose of indicating to the public that the Work is licensed under the CCPL, Creative Commons does not authorize the use by either party of the trademark "Creative Commons" or any related trademark or logo of Creative Commons without the prior written consent of Creative Commons. Any permitted use will be in compliance with Creative Commons' then-current trademark usage guidelines, as may be published on its website or otherwise made available upon request from time to time. For the avoidance of doubt, this trademark restriction does not form part of this License.

Creative Commons may be contacted at http://creativecommons.org/.


================================================
FILE: LICENSES/CC-BY-SA-4.0.txt
================================================
Creative Commons Attribution-ShareAlike 4.0 International

 Creative Commons Corporation (“Creative Commons”) is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an “as-is” basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible.

Using Creative Commons Public Licenses

Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses.

Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. Licensors should clearly mark any material not subject to the license. This includes other CC-licensed material, or material used under an exception or limitation to copyright. More considerations for licensors.

Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor’s permission is not necessary for any reason–for example, because of any applicable exception or limitation to copyright–then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described.

Although not required by our licenses, you are encouraged to respect those requests where reasonable. More considerations for the public.

Creative Commons Attribution-ShareAlike 4.0 International Public License

By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-ShareAlike 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.

Section 1 – Definitions.

     a.	Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.

     b.	Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License.

     c.	BY-SA Compatible License means a license listed at creativecommons.org/compatiblelicenses, approved by Creative Commons as essentially the equivalent of this Public License.

     d.	Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.

     e.	Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.

     f.	Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.

     g.	License Elements means the license attributes listed in the name of a Creative Commons Public License. The License Elements of this Public License are Attribution and ShareAlike.

     h.	Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.

     i.	Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.

     j.	Licensor means the individual(s) or entity(ies) granting rights under this Public License.

     k.	Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.

     l.	Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.

     m.	You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.

Section 2 – Scope.

     a.	License grant.

          1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:

               A. reproduce and Share the Licensed Material, in whole or in part; and

               B. produce, reproduce, and Share Adapted Material.

          2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.

          3. Term. The term of this Public License is specified in Section 6(a).

          4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.

          5. Downstream recipients.

               A. Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.

               B. Additional offer from the Licensor – Adapted Material. Every recipient of Adapted Material from You automatically receives an offer from the Licensor to exercise the Licensed Rights in the Adapted Material under the conditions of the Adapter’s License You apply.

               C. No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.

          6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).

     b.	Other rights.

          1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.

          2. Patent and trademark rights are not licensed under this Public License.

          3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties.

Section 3 – License Conditions.

Your exercise of the Licensed Rights is expressly made subject to the following conditions.

     a.	Attribution.

          1. If You Share the Licensed Material (including in modified form), You must:

               A. retain the following if it is supplied by the Licensor with the Licensed Material:

                    i.	identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);

                    ii.	a copyright notice;

                    iii. a notice that refers to this Public License;

                    iv.	a notice that refers to the disclaimer of warranties;

                    v.	a URI or hyperlink to the Licensed Material to the extent reasonably practicable;

               B. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and

               C. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License.

          2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.

          3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.

     b.	ShareAlike.In addition to the conditions in Section 3(a), if You Share Adapted Material You produce, the following conditions also apply.

          1. The Adapter’s License You apply must be a Creative Commons license with the same License Elements, this version or later, or a BY-SA Compatible License.

          2. You must include the text of, or the URI or hyperlink to, the Adapter's License You apply. You may satisfy this condition in any reasonable manner based on the medium, means, and context in which You Share Adapted Material.

          3. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, Adapted Material that restrict exercise of the rights granted under the Adapter's License You apply.

Section 4 – Sui Generis Database Rights.

Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:

     a.	for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database;

     b.	if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material, including for purposes of Section 3(b); and

     c.	You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database.
For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.

Section 5 – Disclaimer of Warranties and Limitation of Liability.

     a.	Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You.

     b.	To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You.

     c.	The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.

Section 6 – Term and Termination.

     a.	This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.

     b.	Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates:

          1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or

          2. upon express reinstatement by the Licensor.

     c.	For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.

     d.	For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.

     e.	Sections 1, 5, 6, 7, and 8 survive termination of this Public License.

Section 7 – Other Terms and Conditions.

     a.	The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.

     b.	Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.

Section 8 – Interpretation.

     a.	For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.

     b.	To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.

     c.	No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.

     d.	Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.

Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the “Licensor.” Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark “Creative Commons” or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses.

Creative Commons may be contacted at creativecommons.org.


================================================
FILE: LICENSES/MIT.txt
================================================
MIT License

Copyright (c) <year> <copyright holders>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


================================================
FILE: README.md
================================================
<!--
SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)

SPDX-License-Identifier: CC-BY-SA-4.0
-->

# sbomnix

This repository is home to various command line tools and Python libraries that aim to help with software supply chain challenges:
- [`sbomnix`](#generate-sbom) is a utility that generates SBOMs given a [Nix](https://nixos.org/) flake reference or store path.
- [`nixgraph`](./doc/nixgraph.md) helps query and visualize dependency graphs for [Nix](https://nixos.org/) packages.
- [`nixmeta`](./doc/nixmeta.md) summarizes nixpkgs meta-attributes from the given nixpkgs version.
- [`vulnxscan`](./doc/vulnxscan.md) is a vulnerability scanner demonstrating the usage of SBOMs in running vulnerability scans.
- [`repology_cli`](./doc/repology_cli.md) and [`repology_cve`](./doc/repology_cli.md#repology-cve-search) are command line clients to [repology.org](https://repology.org/).
- [`nix_outdated`](./doc/nix_outdated.md) is a utility that finds outdated nix dependencies for given out path, listing the outdated packages in priority order based on how many other packages depend on the given outdated package.
- [`provenance`](./doc/provenance.md) is a command line tool to generate SLSA v1.0 compliant [provenance](https://slsa.dev/spec/v1.0/provenance) attestation files in json format for any nix flake or derivation.

For an example of how to use the tooling provided in this repository to automate daily vulnerability scans for a nix flake project, see: [ghafscan](https://github.com/tiiuae/ghafscan).

The [CycloneDX](https://cyclonedx.org/) and [SPDX](https://spdx.github.io/spdx-spec/v2.3/) SBOMs for each release of sbomnix tooling are available in the [release assets](https://github.com/tiiuae/sbomnix/releases/latest).

All the tools in this repository originate from [Ghaf Framework](https://github.com/tiiuae/ghaf).

Table of Contents
=================

* [Getting Started](#getting-started)
   * [Running as Nix Flake](#running-as-nix-flake)
   * [Running from Nix Development Shell](#running-from-nix-development-shell)
* [Buildtime vs Runtime Dependencies](#buildtime-vs-runtime-dependencies)
   * [Buildtime Dependencies](#buildtime-dependencies)
   * [Runtime Dependencies](#runtime-dependencies)
* [Usage Examples](#usage-examples)
   * [Generate SBOM Based on Flake Reference](#generate-sbom-based-on-flake-reference)
   * [Generate SBOM Based on Derivation File or Out-path](#generate-sbom-based-on-derivation-file-or-out-path)
   * [Generate SBOM Including Buildtime Dependencies](#generate-sbom-including-buildtime-dependencies)
   * [Generate SBOM Based on a Store Path or Result Symlink](#generate-sbom-based-on-a-store-path-or-result-symlink)
   * [Nixpkgs Metadata Source Selection](#nixpkgs-metadata-source-selection)
   * [Visualize Package Dependencies](#visualize-package-dependencies)
* [Contribute](#contribute)
* [License](#license)
* [Acknowledgements](#acknowledgements)

## Getting Started
`sbomnix` requires the [Nix](https://nixos.org/download.html) command line
tool to be in `$PATH`. Direct, non-flake usage requires a modern `nix`
supporting `nix-command` and `--json-format 1`.

### Running as Nix Flake
`sbomnix` can be run as a [Nix flake](https://nixos.wiki/wiki/Flakes) from the `tiiuae/sbomnix` repository:
```bash
# '--' signifies the end of argument list for `nix`.
# '--help' is the first argument to `sbomnix`
$ nix run github:tiiuae/sbomnix#sbomnix -- --help
```

or from a local repository:
```bash
$ git clone https://github.com/tiiuae/sbomnix
$ cd sbomnix
$ nix run .#sbomnix -- --help
```
See the full list of supported flake targets by running `nix flake show`.

### Running from Nix Development Shell

If you have nix flakes [enabled](https://nixos.wiki/wiki/Flakes#Enable_flakes), start a development shell:
```bash
$ git clone https://github.com/tiiuae/sbomnix
$ cd sbomnix
$ nix develop
```

The devshell adds all CLI entry points (`sbomnix`, `nixgraph`, `nixmeta`, `vulnxscan`, `repology_cli`, `repology_cve`, `nix_outdated`, `provenance`) to `PATH`. They run against the local source tree, so any edits are picked up immediately without reinstalling.

All tools support a consistent verbosity flag: no flag or `--verbose=0`
shows INFO output, `-v` or `--verbose=1` enables VERBOSE progress
details, `-vv` or `--verbose=2` enables DEBUG details, and `-vvv` or
`--verbose=3` enables SPAM output. Repeated short flags are counted, so
`-v -v`, `-vv`, and `--verbose=2` are equivalent.

## Buildtime vs Runtime Dependencies
#### Buildtime Dependencies
The buildtime dependencies of a Nix package are the [closure](https://nixos.org/manual/nix/stable/glossary.html#gloss-closure) of its derivation (`.drv` file): all the store paths Nix must have available to reproduce the build, including compilers, build tools, standard libraries, and the infrastructure to bootstrap them. Even a simple hello-world C program typically pulls in over 150 packages, including gcc, stdenv, glibc, and bash. Computing the buildtime dependency closure only requires evaluating the derivation; the target does not need to be built.

For reference, below is a graph of the first two layers of buildtime dependencies of an example hello-world C program (direct dependencies and the first level of transitive dependencies): [C hello-world buildtime, depth=2](doc/img/c_hello_world_buildtime_d2.svg).

#### Runtime Dependencies
[Runtime dependencies](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-why-depends.html#description) are a subset of buildtime dependencies. When Nix builds a package, it scans the build outputs for references to other store paths and records them. The runtime closure is the transitive set of those recorded references: the store paths the built output actually needs at runtime. Because this information is captured during the build, the target must be built before its runtime dependencies can be determined. For reference, below is the complete runtime dependency graph of the same hello-world C program:

<img src="doc/img/c_hello_world_runtime.svg" width="700">

By default, the tools in this repository work with runtime dependencies. Specifically, unless told otherwise, `sbomnix` generates an SBOM of runtime dependencies, `nixgraph` graphs runtime dependencies, and `vulnxscan` and `nix_outdated` scan runtime dependencies. Since the target must be built to determine runtime dependencies, all these tools will build (force-realise) the target as part of their invocation. All tools also accept a `--buildtime` argument to work with buildtime dependencies instead; as noted above, using `--buildtime` does not require building the target.


## Usage Examples
In the below examples, we use Nix package `wget` as an example target, referred to by flakeref `github:NixOS/nixpkgs/nixos-unstable#wget`.

#### Generate SBOM Based on Flake Reference
`sbomnix` accepts [flake references](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#flake-references) as targets:
```bash
$ sbomnix github:NixOS/nixpkgs?ref=nixos-unstable#wget
```

#### Generate SBOM Based on Derivation File or Out-path
Flake references are the recommended target for `sbomnix`. When the target is a flake reference, `sbomnix` can resolve the nixpkgs version used to build the package and enrich the SBOM with metadata such as descriptions, licenses, maintainers, and homepage links. When the target is a store path, there is no information about which nixpkgs version produced it, so metadata enrichment is skipped by default; see [Nixpkgs Metadata Source Selection](#nixpkgs-metadata-source-selection).

By default `sbomnix` scans the given target and generates an SBOM including the runtime dependencies.
Notice: determining the target runtime dependencies in Nix requires building the target.
```bash
# Target can be specified as a flakeref or a nix store path, e.g.:
# sbomnix .
# sbomnix github:tiiuae/sbomnix
# sbomnix nixpkgs#wget
# sbomnix /nix/store/...  (note: nixpkgs metadata not available for store path targets)
# Ref: https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#flake-references
$ sbomnix github:NixOS/nixpkgs/nixos-unstable#wget
...
INFO     Wrote: sbom.cdx.json
INFO     Wrote: sbom.spdx.json
INFO     Wrote: sbom.csv
```
Main outputs are the SBOM json files sbom.cdx.json and sbom.spdx.json in [CycloneDX](https://cyclonedx.org/) and [SPDX](https://spdx.github.io/spdx-spec/v2.3/) formats.

#### Generate SBOM Including Buildtime Dependencies
By default `sbomnix` scans the given target for runtime dependencies. You can tell sbomnix to determine the buildtime dependencies using the `--buildtime` argument.
Below example generates SBOM including buildtime dependencies.
Notice: as opposed to runtime dependencies, determining the buildtime dependencies does not require building the target.
```bash
$ sbomnix github:NixOS/nixpkgs/nixos-unstable#wget --buildtime
```

#### Generate SBOM Based on a Store Path or Result Symlink
`sbomnix` accepts Nix store paths and result symlinks as targets:
```bash
$ sbomnix /path/to/result
```
Note: store paths carry no record of which nixpkgs version produced them, so nixpkgs metadata enrichment is skipped by default. Pass `--meta-nixpkgs` to supply a nixpkgs source explicitly, or see [Nixpkgs Metadata Source Selection](#nixpkgs-metadata-source-selection).

#### Nixpkgs Metadata Source Selection
`sbomnix` enriches packages with nixpkgs metadata, such as descriptions,
licenses, maintainers, and homepage links, when it can select a nixpkgs
source that is tied to the target.

For flakeref targets, `sbomnix` uses the target flake context. NixOS
toplevel flakerefs are handled through the selected NixOS package set, so
overlays, package overrides, nixpkgs config, and system-specific package-set
changes can be represented.

Store-path targets skip nixpkgs metadata by default; pass `--meta-nixpkgs` to
choose the source explicitly.

`--meta-nixpkgs <flakeref-or-path>` scans an explicit nixpkgs source.
`--meta-nixpkgs nix-path` scans the `nixpkgs=` entry from `NIX_PATH` as an
explicit opt-in source. `--exclude-meta` disables this enrichment and cannot be
combined with `--meta-nixpkgs`.

CycloneDX and SPDX outputs record the selected metadata source in document
metadata, including fields such as `nixpkgs:metadata_source_method`,
`nixpkgs:path`, `nixpkgs:rev`, `nixpkgs:flakeref`, `nixpkgs:version`, and
`nixpkgs:message`.

#### Visualize Package Dependencies
`sbomnix` uses structured Nix JSON to find package dependencies where
available. `nixgraph` can also be used as a stand-alone tool for visualizing
package dependencies.
Below, we show an example of visualizing package `wget` runtime dependencies:
```bash
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget --depth=2
```

Which outputs the dependency graph as an image (with maxdepth 2):

<img src="doc/img/wget_runtime.svg" width="900">

For more examples on querying and visualizing the package dependencies, see: [nixgraph](./doc/nixgraph.md).

## Contribute
Any pull requests, questions and error reports are welcome.
To start development, we recommend using Nix flakes development shell:
```bash
$ git clone https://github.com/tiiuae/sbomnix
$ cd sbomnix/
$ nix develop
```
Before opening a pull request, run at minimum:
```bash
$ ./scripts/check-fast.sh
```
This runs the formatter, a fast flake eval, and the fast test lane.
CI runs `./scripts/check-full.sh`, which validates the flake and runs the full
test lane with coverage.

To deactivate the Nix devshell, run `exit` in your shell.
To see other Nix flake targets, run `nix flake show`.


## License
This project is licensed under the Apache-2.0 license - see the [Apache-2.0.txt](LICENSES/Apache-2.0.txt) file for details.


## Acknowledgements
Parts of the Nix store derivation loading code in `sbomnix`
([derivation.py](src/sbomnix/derivation.py) and
[derivers.py](src/sbomnix/derivers.py)) originate from
[vulnix](https://github.com/nix-community/vulnix).


================================================
FILE: REUSE.toml
================================================
# SPDX-FileCopyrightText: 2022-2025 Technology Innovation Institute (TII)
# SPDX-License-Identifier: Apache-2.0
version = 1
SPDX-PackageName = "sbomnix"
SPDX-PackageSupplier = "Technology Innovation Institute <https://tii.ae>"
SPDX-PackageDownloadLocation = "https://github.com/tiiuae/sbomnix"

[[annotations]]
SPDX-License-Identifier = "CC-BY-3.0"
SPDX-FileCopyrightText = "2022-2025 Technology Innovation Institute (TII)"
precedence = "closest"
path = [
  "doc/img/*",
]

[[annotations]]
SPDX-License-Identifier = "Apache-2.0"
SPDX-FileCopyrightText = "2022-2025 Technology Innovation Institute (TII)"
precedence = "closest"
path = [
  "**.yml",
  "**.toml",
  "flake.lock",
  "pyrightconfig.json",
  "VERSION",
  "tests/resources/**",
]


================================================
FILE: VERSION
================================================
1.7.6


================================================
FILE: default.nix
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
# SPDX-FileCopyrightText: 2020-2023 Eelco Dolstra and the flake-compat contributors
#
# SPDX-License-Identifier: MIT
# This file originates from:
# https://github.com/nix-community/flake-compat
# This file provides backward compatibility to nix < 2.4 clients
{
  system ? builtins.currentSystem,
}:
let
  # Pinned flake-compat revision, read from this repo's lock file.
  lock = builtins.fromJSON (builtins.readFile ./flake.lock);
  inherit (lock.nodes.flake-compat.locked)
    owner
    repo
    rev
    narHash
    ;

  # Fetch the pinned flake-compat source; narHash keeps the fetch reproducible.
  compatSrc = fetchTarball {
    url = "https://github.com/${owner}/${repo}/archive/${rev}.tar.gz";
    sha256 = narHash;
  };
in
# Evaluate this repository's flake through flake-compat and expose the
# non-flake ("defaultNix") view for nix < 2.4 clients.
(import compatSrc {
  inherit system;
  src = ./.;
}).defaultNix


================================================
FILE: doc/nix_outdated.md
================================================
<!--
SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)

SPDX-License-Identifier: CC-BY-SA-4.0
-->

# Getting Started
To get started, follow the [Getting Started](../README.md#getting-started) section from the main [README](../README.md).

As an example, to run the [`nix_outdated`](../src/nixupdate/nix_outdated.py) from the `tiiuae/sbomnix` repository:
```bash
# '--' signifies the end of argument list for `nix`.
# '--help' is the first argument to `nix_outdated`
$ nix run github:tiiuae/sbomnix#nix_outdated -- --help
```

## Example Target
We use Nix package `git` as an example target, referred to by flakeref `github:NixOS/nixpkgs/nixos-unstable#git`.

# nix_outdated
[`nix_outdated`](../src/nixupdate/nix_outdated.py) is a command line tool to list outdated nix dependencies for given target nix out path or flakeref. By default, the script outputs runtime dependencies for the given target that appear outdated in nixpkgs 'nix_unstable' channel - the list of output packages would potentially need a PR to update the package in nixpkgs to the package's latest upstream release version specified in the output table column 'version_upstream'. The list of output packages is in priority order based on how many other packages depend on the potentially outdated package.

Below command finds `git` runtime dependencies that would have an update in the package's upstream repository based on repology, and the latest release version is not available in nix unstable. The captured output is illustrative; exact versions and findings will differ depending on the package versions resolved at run time.

```bash
# Target can be specified as a flakeref or a nix store path, e.g.:
# nix_outdated .
# nix_outdated github:tiiuae/sbomnix
# nix_outdated nixpkgs#git
# nix_outdated /nix/store/...
# Ref: https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#flake-references
$ nix_outdated github:NixOS/nixpkgs/nixos-unstable#git
INFO     Generating SBOM for target '/nix/store/...-git-<version>'
INFO     Dependencies that need update in nixpkgs (in priority order based on how many other packages depend on the potentially outdated package):

|  priority  | nix_package        | version_local   | version_nixpkgs   | version_upstream      |
|------------+--------------------+-----------------+-------------------+-----------------------|
|     9      | libidn2            | 2.3.2           | 2.3.2             | 2.3.4                 |
|     8      | glibc              | 2.35-224        | 2.35-224          | 2.37                  |
|     5      | perl:uri           | 5.05            | 5.05              | 5.17                  |
|     4      | perl:http-message  | 6.26            | 6.26              | 6.44                  |
|     4      | openssl            | 3.0.8           | 3.0.8             | 3.1.0                 |
|     3      | perl:html-parser   | 3.75            | 3.75              | 3.81                  |
|     3      | perl:try-tiny      | 0.30            | 0.30              | 0.31                  |
|     3      | perl:mozilla-ca    | 20200520        | 20200520          | 20221114;20221114.0.0 |
|     2      | perl:digest-hmac   | 1.03            | 1.03              | 1.04                  |
|     2      | sqlite             | 3.40.1          | 3.41.0            | 3.41.1                |
|     2      | perl:fcgi          | 0.79            | 0.79              | 0.82                  |
|     2      | perl:net-http      | 6.19            | 6.19              | 6.22                  |
|     2      | perl:io-socket-ssl | 2.068           | 2.068             | 2.081;2.81.0          |
|     2      | perl:file-listing  | 6.14            | 6.14              | 6.15                  |
|     2      | perl:http-daemon   | 6.14            | 6.14              | 6.16                  |
|     2      | perl:http-cookies  | 6.09            | 6.09              | 6.10;6.10.0           |
|     2      | perl:cgi           | 4.51            | 4.51              | 4.56                  |
|     2      | nghttp2            | 1.51.0          | 1.51.0            | 1.52.0                |
|     2      | perl:test-fatal    | 0.016           | 0.016             | 0.017;0.17.0          |
|     2      | perl:test-needs    | 0.002006        | 0.002006          | 0.002010              |
|     1      | perl:libnet        | 3.12            | 3.12              | 3.14                  |
|     1      | git                | 2.39.2          | 2.39.2            | 2.40.0                |
|     1      | gettext            | 0.21            | 0.21              | 0.21.1                |
|     1      | perl:libwww-perl   | 6.67            | 6.67              | 6.68                  |


INFO     Wrote: nix_outdated.csv
```

As an example, the first row in the above output table means that:
- `libidn2` in nix unstable is not up-to-date with what repology.org knows is the package's newest upstream version.
- `libidn2` is on the top of the table, as it has the highest priority among the listed outdated packages. The priority is based on how many other packages depend on the given outdated package. This datapoint is based on [nix-visualize](https://github.com/craigmbooth/nix-visualize). The value of the `priority` column is directly the `level` value determined by [nix-visualize](https://github.com/craigmbooth/nix-visualize). For full description of the `level` values, see nix-visualize documentation: https://github.com/craigmbooth/nix-visualize#vertical-positioning.
- `libidn2` local version is 2.3.2.
- `libidn2` newest version in nix unstable is 2.3.2 (based on repology.org).
- `libidn2` newest release version in the package's upstream repository is 2.3.4 (based on repology.org).
- `libidn2` is considered outdated, because the version string in `version_upstream` is later than the version string in `version_nixpkgs`.


================================================
FILE: doc/nixgraph.md
================================================
<!--
SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)

SPDX-License-Identifier: CC-BY-SA-4.0
-->

# nixgraph

[`nixgraph`](../src/nixgraph/main.py) is a Python library and command line utility for querying and visualizing dependency graphs for [Nix](https://nixos.org/) packages.


Table of Contents
=================
* [Getting Started](#getting-started)
* [Usage examples](#usage-examples)
   * [Example: package runtime dependencies](#example-package-runtime-dependencies)
   * [Example: depth](#example-depth)
   * [Example: colorize](#example-colorize)
   * [Example: inverse](#example-inverse)
   * [Example: package buildtime dependencies](#example-package-buildtime-dependencies)
   * [Example: output format](#example-output-format)
   * [Example: pathnames](#example-pathnames)


## Getting Started
To get started, follow the [Getting Started](../README.md#getting-started) section from the main [README](../README.md).

As an example, to run the [`nixgraph`](../src/nixgraph/main.py) from your local clone of the `tiiuae/sbomnix` repository:
```bash
# '--' signifies the end of argument list for `nix`.
# '--help' is the first argument to `nixgraph`
$ nix run .#nixgraph -- --help
```

## Usage examples
In the below examples, we use nix package `wget` as an example target, referred to by flakeref `github:NixOS/nixpkgs/nixos-unstable#wget`. The example graphs below are illustrative; the actual graph generated will reflect the dependency versions resolved at run time.

#### Example: package runtime dependencies
```bash
# Target can be specified as a flakeref or a nix store path, e.g.:
# nixgraph .
# nixgraph github:tiiuae/sbomnix
# nixgraph nixpkgs#wget
# nixgraph /nix/store/...
# Ref: https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#flake-references
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget

INFO     Wrote: graph.png
```
By default `nixgraph` scans the given target and generates a graph that shows the direct runtime dependencies.
The default output is a png image `graph.png`:

<img src="img/wget_r1.svg">
<br /><br />


#### Example: depth
```bash
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget --depth=2
```

By default, when `--depth` argument is not specified, `nixgraph` shows the direct dependencies. Increasing the `--depth` makes `nixgraph` walk the dependency chain deeper. For instance, with `--depth=2`, the output graph for `wget` becomes:

<img src="img/wget_r2.svg" width="900">
<br /><br />

The value of `--depth` indicates the maximum depth between any two nodes in the resulting graph. For instance, in the above example, `libunistring-1.0` gets included with `--depth=2` because the shortest path between `wget` and `libunistring` is two hops deep (`wget --> libidn2 --> libunistring`).

#### Example: colorize
```bash
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget --depth=2 --colorize='openssl|libidn'
```

`--colorize` allows highlighting nodes that match the specified regular expression:

<img src="img/wget_r2_col.svg" width="900">
<br /><br />


#### Example: inverse
```bash
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget --depth=2 --inverse='glibc'
```

`--inverse` makes it possible to draw the graph backwards starting from nodes that match the specified regular expression. For instance, the above command would show all the dependency paths from `wget` that lead to `glibc`:

<img src="img/wget_r2_inv.svg">
<br /><br />

`--inverse` is especially useful when working with larger graphs.

As an example, consider the following graph for `git`:
(`nixgraph github:NixOS/nixpkgs/nixos-unstable#git --depth=3 --colorize="openssl-3|sqlite-3"`)

<img src="img/git_r2_col.svg" width="900">
<br /><br />

To find out what are all the runtime dependency paths from `git` to the highlighted nodes `openssl` or `sqlite` in the above graph, run the following command:
```bash
# --depth=100: make sure the output graph includes "long enough" dependency chains
# --inverse="openssl-3|sqlite-3": draw the graph backwards starting from nodes that
#                                 match the specified regular expression
# --colorize="openssl-3|sqlite-3": colorize the matching nodes
nixgraph github:NixOS/nixpkgs/nixos-unstable#git --depth=100 --colorize="openssl-3|sqlite-3" --inverse="openssl-3|sqlite-3"
```
The output now becomes:

<img src="img/git_r2_col_inv.svg">
<br /><br />

The output graph shows that there are three dependency paths from `git` to `openssl-3.0.7` and one dependency path that leads to `sqlite-3.39.4`.

#### Example: package buildtime dependencies
```bash
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget --buildtime
```

Specifying `--buildtime` makes `nixgraph` visualize the buildtime dependencies instead of runtime dependencies:

<img src="img/wget_b1.svg">
<br /><br />


#### Example: output format
```bash
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget --out="graph.dot"
```
By default `nixgraph` outputs the graph in png image `graph.png`. To change the output file name and format, use the `--out` argument. The output filename extension determines the output format. As an example, the above command would output the graph in `dot` format. For a full list of supported output formats, see: https://graphviz.org/doc/info/output.html. In addition to graphviz supported output formats, the tool supports output in csv to allow post-processing the output data.


#### Example: pathnames
```bash
$ nixgraph github:NixOS/nixpkgs/nixos-unstable#wget --depth=1 --pathnames
```

`--pathnames` argument allows adding store path to node label in the output graph:

<img src="img/wget_r1_paths.svg">
<br /><br />


================================================
FILE: doc/nixmeta.md
================================================
<!--
SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)

SPDX-License-Identifier: CC-BY-SA-4.0
-->

# Getting Started
To get started, follow the [Getting Started](../README.md#getting-started) section from the main [README](../README.md).

As an example, to run the [`nixmeta`](../src/nixmeta/main.py) from the `tiiuae/sbomnix` repository:
```bash
# '--' signifies the end of argument list for `nix`.
# '--help' is the first argument to `nixmeta`
$ nix run github:tiiuae/sbomnix#nixmeta -- --help
```

# nixmeta
[`nixmeta`](../src/nixmeta/main.py) is a command line tool to summarize nixpkgs meta-attributes from the given nixpkgs version. The output is written to a csv file. Nixpkgs version is specified with [`flakeref`](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake#flake-references). As an example, `--flakeref=github:NixOS/nixpkgs?ref=master` would make `nixmeta` output the meta-attributes from the nixpkgs version in the [master](https://github.com/NixOS/nixpkgs/tree/master) branch. Similarly, `--flakeref=github:NixOS/nixpkgs?ref=release-23.11` would output the meta-attributes from the nixpkgs version in the [release-23.11](https://github.com/NixOS/nixpkgs/tree/release-23.11) branch. Note that `--flakeref` does not necessarily have to reference `github:NixOS/nixpkgs` but any flakeref or even `NIX_ENV` environment variable can be used to specify the nixpkgs version. As an example, `--flakeref=github:tiiuae/sbomnix` would make `nixmeta` output the meta-attributes from the nixpkgs version [pinned by the sbomnix flake](https://github.com/tiiuae/sbomnix/blob/c243db5272fb01c4d97cbbb01a095ae514cd2dcb/flake.lock#L68) in its default branch.

As an example, below command outputs nixpkgs meta-attributes from the nixpkgs version pinned by flake `github:NixOS/nixpkgs?ref=master`:

```bash
$ nixmeta --flakeref=github:NixOS/nixpkgs?ref=master
INFO     Finding meta-info for nixpkgs pinned in flake: github:NixOS/nixpkgs?ref=master
INFO     Wrote: /home/foo/sbomnix-fork/nixmeta.csv
```

Output summarizes the meta-attributes of all the target nixpkgs packages enumerated by `nix-env --query --available`.
For each package, the output includes the following details:

```bash
$ head -n2 nixmeta.csv | csvlook
| name       | pname | version | meta_homepage        | meta_unfree | meta_license_short               | meta_license_spdxid                    | meta_maintainers_email |
| ---------- | ----- | ------- | -------------------- | ----------- | -------------------------------- | -------------------------------------- | ---------------------- |
| 0ad-0.0.26 | 0ad   | 0.0.26  | https://play0ad.com/ |       False | gpl2;lgpl21;mit;cc-by-sa-30;zlib | GPL-2.0;LGPL-2.1;MIT;CC-BY-SA-3.0;Zlib | nixpkgs@cvpetegem.be   |

```


================================================
FILE: doc/provenance.md
================================================
<!--
SPDX-FileCopyrightText: 2024 Technology Innovation Institute (TII)

SPDX-License-Identifier: CC-BY-SA-4.0
-->

# Getting Started

To get started, follow the [Getting Started](../README.md#getting-started) section from the main [README](../README.md).

As an example, to run the [`provenance`](../src/provenance/main.py) tool from the `tiiuae/sbomnix` repository:

```bash
# '--' signifies the end of argument list for `nix`.
# '--help' is the first argument to `provenance`
$ nix run github:tiiuae/sbomnix#provenance -- --help
```

# provenance

[`provenance`](../src/provenance/main.py) is a command line tool to generate SLSA v1.0 compliant [provenance](https://slsa.dev/spec/v1.0/provenance) attestation files in json format for any nix flake or derivation.

To generate provenance file for `nixpkgs#hello`:

```bash
provenance nixpkgs#hello
```

To generate provenance file for `curl-8.6.0` in your nix store:

```bash
provenance /nix/store/fh7vxc5xgiwl6z7vwq5c3lj84mpcs4br-curl-8.6.0-bin
```

By default the dependencies are resolved only at the top level, i.e. only direct dependencies.
To get all dependencies recursively, you can use the `--recursive` option.
Note that this will result in a very long provenance file.

The dependencies listed are the nix buildtime dependencies of the derivation.

Example recursive provenance which is saved into a file:

```bash
provenance nixpkgs#hello --recursive --out ./provenance.json
```

## Build metadata

The build metadata to be used in the provenance is supplied through environment variables.
These fields cannot be automatically derived from the nix derivation as they are build platform dependent.

Variable | Type | Explanation
--- | --- | ---
PROVENANCE_BUILD_TYPE | str | Corresponds to SLSA [buildDefinition.buildType](https://slsa.dev/spec/v1.0/provenance#builddefinition)
PROVENANCE_BUILDER_ID | str | Corresponds to SLSA [runDetails.builder.id](https://slsa.dev/spec/v1.0/provenance#builder)
PROVENANCE_INVOCATION_ID | str/int | Corresponds to SLSA [buildMetadata.invocationId](https://slsa.dev/spec/v1.0/provenance#buildmetadata)
PROVENANCE_TIMESTAMP_BEGIN | int (unix timestamp) | Is parsed into SLSA [buildMetadata.startedOn](https://slsa.dev/spec/v1.0/provenance#buildmetadata)
PROVENANCE_TIMESTAMP_FINISHED | int (unix timestamp) | Is parsed into SLSA [buildMetadata.finishedOn](https://slsa.dev/spec/v1.0/provenance#buildmetadata)
PROVENANCE_EXTERNAL_PARAMS | json | Corresponds to SLSA [buildDefinition.externalParameters](https://slsa.dev/spec/v1.0/provenance#builddefinition)
PROVENANCE_INTERNAL_PARAMS | json | Corresponds to SLSA [buildDefinition.internalParameters](https://slsa.dev/spec/v1.0/provenance#builddefinition)
PROVENANCE_OUTPUT_FILE | path | Has the same function as the `--out` argument.

Example usage in a simplified build script:

```bash
target="nixpkgs#hello"

PROVENANCE_TIMESTAMP_BEGIN="$(date +%s)"

nix build $target

PROVENANCE_TIMESTAMP_FINISHED="$(date +%s)"

PROVENANCE_EXTERNAL_PARAMS="$(jq -n --arg target "$target" '$ARGS.named')"
PROVENANCE_INTERNAL_PARAMS="$(jq -n --arg nixVersion "$(nix --version)" '$ARGS.named')"

export PROVENANCE_TIMESTAMP_BEGIN
export PROVENANCE_TIMESTAMP_FINISHED
export PROVENANCE_EXTERNAL_PARAMS
export PROVENANCE_INTERNAL_PARAMS

provenance $target --out ./provenance.json
```


================================================
FILE: doc/repology_cli.md
================================================
<!--
SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)

SPDX-License-Identifier: CC-BY-SA-4.0
-->

# repology_cli

[`repology_cli`](../src/repology/repology_cli.py) is a command line interface to [repology.org](https://repology.org/). It supports querying package information via package search terms in the same manner as https://repology.org/projects/?search. In addition, it supports querying package information from all packages in a CycloneDX SBOM and printing out some simple statistics based on the input.


Table of Contents
=================

* [Getting Started](#getting-started)
* [Usage Examples](#usage-examples)
   * [Search by Package Name Exact Match](#search-by-package-name-exact-match)
   * [Search by Package Name Search Term](#search-by-package-name-search-term)
   * [Search by Package Names in SBOM](#search-by-package-names-in-sbom)
   * [Statistics: SBOM Packages](#statistics-sbom-packages)
   * [Repology CVE search](#repology-cve-search)

## Getting Started
To get started, follow the [Getting Started](../README.md#getting-started) section from the main [README](../README.md).

As an example, to run the [`repology_cli`](../src/repology/repology_cli.py) from your local clone of the `tiiuae/sbomnix` repository:
```bash
# '--' signifies the end of argument list for `nix`.
# '--help' is the first argument to `repology_cli`
$ nix run .#repology_cli -- --help
```

## Usage Examples

### Search by Package Name Exact Match
Following query finds package name 'firefox' versions in 'nix_unstable' repository:
```bash
$ repology_cli --pkg_exact "firefox" --repository nix_unstable

INFO     Repology package info, packages:5

| repo         | package | version               | status   | potentially_vulnerable | newest_upstream_release | repo_version_classify |
|--------------+---------+-----------------------+----------+------------------------+-------------------------+-----------------------|
| nix_unstable | firefox | 102-unwrapped-102.8.0 | legacy   |           1            | 110.0.1                 |                       |
| nix_unstable | firefox | 102.8.0               | legacy   |           1            | 110.0.1                 |                       |
| nix_unstable | firefox | 110.0.1               | newest   |           0            | 110.0.1                 |                       |
| nix_unstable | firefox | 111.0b7               | outdated |           0            | 110.0.1                 | repo_pkg_needs_update |
| nix_unstable | firefox | 111.0b8               | devel    |           0            | 110.0.1                 |                       |

For more details, see: https://repology.org/projects/?search=firefox&inrepo=nix_unstable

INFO     Wrote: repology_report.csv
```

Output table includes the datapoints available in repology.org, as stated by each column name. As an example, the first row in the above output table means:
- package information was fetched for repository 'nix_unstable'
- package name is 'firefox'
- latest 'nix_unstable' includes a version of firefox with version string '102-unwrapped-102.8.0'
- firefox '102-unwrapped-102.8.0' status is 'legacy'. The details of each classification status are available at https://repology.org/docs/about.
- firefox '102-unwrapped-102.8.0' is potentially vulnerable, meaning the package version is associated with at least one CVE. For details of which CVEs repology determined the package is associated with, see: https://repology.org/project/firefox/cves or https://repology.org/project/firefox/cves?version=102-unwrapped-102.8.0
- newest upstream release version of firefox known to repology is '110.0.1'

In addition to the above datapoints, `repology_cli` adds the column 'repo_version_classify', which simply states whether the specific package version appears updatable in the given repository. As an example, in the above output, the second last row states 'repo_pkg_needs_update' which means that it appears 'nix_unstable' should update the firefox '111.0b7' to the latest firefox upstream release version '110.0.1'.

A full list of the repositories available in repology is published at https://repology.org/repositories/statistics. As an example, to repeat the earlier query for Debian 12, you would run:

```bash
$ repology_cli --pkg_exact "firefox" --repository debian_12

INFO     Repology package info, packages:1

| repo      | package   | version   | status   |  potentially_vulnerable  | newest_upstream_release   | repo_version_classify   |
|-----------+-----------+-----------+----------+--------------------------+---------------------------+-------------------------|
| debian_12 | firefox   | 102.8.0   | outdated |            1             | 110.0.1                   | repo_pkg_needs_update   |

For more details, see: https://repology.org/projects/?search=firefox&inrepo=debian_12

INFO     Wrote: repology_report.csv
```

### Search by Package Name Search Term
Following query finds 'debian_12' packages that include 'firefox' anywhere in the name string:

```bash
$ repology_cli --pkg_search "firefox" --repository debian_12

INFO     Repology package info, packages:5

| repo      | package                     | version | status   | potentially_vulnerable | newest_upstream_release | repo_version_classify |
|-----------+-----------------------------+---------+----------+------------------------+-------------------------+-----------------------|
| debian_12 | activity-aware-firefox      | 0.4.1   | unique   |           0            |                         |                       |
| debian_12 | firefox                     | 102.8.0 | outdated |           1            | 110.0.1                 | repo_pkg_needs_update |
| debian_12 | firefox-esr-mobile-config   | 3.2.0   | unique   |           0            |                         |                       |
| debian_12 | foxyproxy-firefox-extension | 7.5.1   | unique   |           0            |                         |                       |
| debian_12 | perl:firefox-marionette     | 1.35    | newest   |           0            | 1.35                    |                       |
```

Notice: using short search strings with `--pkg_search` might result in a large number of matches and, thus, potentially a large number of queries to repology.org. To avoid spamming repology.org with such queries, `repology_cli` limits the number of requests sent to repology.org to at most one request per second. In addition, it caches all responses locally for two hours.

### Search by Package Names in SBOM
Following query finds 'nix_unstable' packages that match the packages in the CycloneDX sbom 'wget.runtime.sbom.cdx.json':

```bash
$ repology_cli --sbom_cdx  wget.runtime.sbom.cdx.json --repository nix_unstable

INFO     Repology package info, packages:9

| repo         | package      | version  | status   | potentially_vulnerable | newest_upstream_release | version_sbom | sbom_version_classify | repo_version_classify |
|--------------+--------------+----------+----------+------------------------+-------------------------+--------------+-----------------------+-----------------------|
| nix_unstable | glibc        | 2.35-224 | outdated |           0            | 2.37                    | 2.35-224     | sbom_pkg_needs_update | repo_pkg_needs_update |
| nix_unstable | libidn2      | 2.3.2    | outdated |           0            | 2.3.4                   | 2.3.2        | sbom_pkg_needs_update | repo_pkg_needs_update |
| nix_unstable | libunistring | 1.0      | outdated |           0            | 1.1                     | 1.0          | sbom_pkg_needs_update | repo_pkg_needs_update |
| nix_unstable | openssl      | 1.1.1t   | legacy   |           0            | 3.0.8                   | 3.0.8        |                       |                       |
| nix_unstable | openssl      | 3.0.8    | newest   |           0            | 3.0.8                   | 3.0.8        |                       |                       |
| nix_unstable | pcre         | 8.45     | newest   |           0            | 8.45                    | 8.45         |                       |                       |
| nix_unstable | wget         | 1.21.3   | legacy   |           0            | 2.0.1                   | 1.21.3       |                       |                       |
| nix_unstable | wget         | 2.0.1    | newest   |           0            | 2.0.1                   | 1.21.3       | sbom_pkg_needs_update |                       |
| nix_unstable | zlib         | 1.2.13   | newest   |           0            | 1.2.13                  | 1.2.13       |                       |                       |
```

Output includes package details from the packages in the given SBOM that were also found in repology.org. In addition to the datapoints covered in section [Search by Package Name Exact Match](#search-by-package-name-exact-match), `repology_cli` adds the column 'sbom_version_classify' which states whether the package version in SBOM appears outdated. As an example, in the above output, package 'wget' version in sbom is '1.21.3'. Column 'sbom_version_classify' states 'sbom_pkg_needs_update' because 'nix_unstable' would have an update to the 'wget' package to version '2.0.1'.

### Statistics: SBOM Packages
Following is the same query as above, but adds the command-line argument `--stats` to print out some simple statistics that might help explain the results.

```bash
$ repology_cli --sbom_cdx  wget.runtime.sbom.cdx.json --repository nix_unstable --stats
INFO     Repology package info, packages:9

| repo         | package      | version   | status   |  potentially_vulnerable  | newest_upstream_release   | version_sbom   | sbom_version_classify   | repo_version_classify   |
|--------------+--------------+-----------+----------+--------------------------+---------------------------+----------------+-------------------------+-------------------------|
| nix_unstable | glibc        | 2.35-224  | outdated |            0             | 2.37                      | 2.35-224       | sbom_pkg_needs_update   | repo_pkg_needs_update   |
| nix_unstable | libidn2      | 2.3.2     | outdated |            0             | 2.3.4                     | 2.3.2          | sbom_pkg_needs_update   | repo_pkg_needs_update   |
| nix_unstable | libunistring | 1.0       | outdated |            0             | 1.1                       | 1.0            | sbom_pkg_needs_update   | repo_pkg_needs_update   |
| nix_unstable | openssl      | 1.1.1t    | legacy   |            0             | 3.0.8                     | 3.0.8          |                         |                         |
| nix_unstable | openssl      | 3.0.8     | newest   |            0             | 3.0.8                     | 3.0.8          |                         |                         |
| nix_unstable | pcre         | 8.45      | newest   |            0             | 8.45                      | 8.45           |                         |                         |
| nix_unstable | wget         | 1.21.3    | legacy   |            0             | 2.0.1                     | 1.21.3         |                         |                         |
| nix_unstable | wget         | 2.0.1     | newest   |            0             | 2.0.1                     | 1.21.3         | sbom_pkg_needs_update   |                         |
| nix_unstable | zlib         | 1.2.13    | newest   |            0             | 1.2.13                    | 1.2.13         |                         |                         |

For more details, see: https://repology.org/projects/

INFO
	Repology package statistics:
	 (see the status descriptions in: https://repology.org/docs/about)
	   Unique compared packages: 7 (100%)	(status in: ['newest', 'devel', 'unique', 'outdated'])
	    ==> newest: 4 (57%)
	    ==> outdated: 3 (43%)
	    ==> devel or unique: 0 (0%)
	    ==> potentially vulnerable: 0 (0%)

INFO
	Repology SBOM package statistics:
	  Unique packages: 10 (100%)
	   ==> sbom packages in repology: 9 (90%)
	   ==> sbom packages not in repology: 1 (10%)
	        - IGNORED (sbom component is not a package in repology): 0
	        - NO_VERSION (sbom component is missing the version number): 0
	        - NOT_FOUND (sbom component was not found in repology): 1

INFO     Wrote: repology_report.csv
```
Section 'Repology package statistics' in the console output indicates that:
- There were seven packages whose status was one of `['newest', 'devel', 'unique', 'outdated']`. These are the package statuses `repology_cli` considers in the statistics output.
- Four out of the total of seven packages had the status 'newest'. This number indicates how many packages are up-to-date with its known latest release version in upstream.
- Three out of seven packages have the status 'outdated'. This number indicates how many packages are not up-to-date with its known latest upstream release version in 'nix_unstable' repository.
- There were no devel or unique packages. 'devel' packages indicate latest development or unstable package versions, whereas, 'unique' packages are only present in a single repository family, meaning there are no other sources for repology.org to compare them against.
- There were no packages with known vulnerabilities associated to them.

Section 'Repology SBOM package statistics' in the console output indicates that:
- The baseline for SBOM package comparison is ten unique packages. This number includes the unique components in the cdx SBOM (as identified by the component name and version), as well as other current package versions in 'nix_unstable' known to repology.
- Nine component names in the SBOM can be matched with package names in repology.
- One package was not included in the comparison by `repology_cli`. The reason is 'NOT_FOUND', meaning the package was not found in repology.org. Other possible reasons for `repology_cli` to skip SBOM packages are IGNORED and NO_VERSION. IGNORED means the sbom component name indicates the component is not a package in repology.org. Typical examples of IGNORED packages would be archives (.tar.gz) or patches (.patch). NO_VERSION means the sbom component was missing the version information. Typically, such packages are service files, scripts, or configuration files that are not considered as packages in repology.org but can be included as separate components in the SBOM.

In addition to the console output, `repology_cli` outputs the full data set in a csv file. As an example, you could query the `repology_report.csv` for more details of the skipped packages:

```bash

$ csvsql --query "select * from repology_report where status == 'NOT_FOUND'" repology_report.csv | csvlook

| repo         | package            | version | status    |       | version_sbom |
| ------------ | ------------------ | ------- | --------- |  ...  | ------------ |
| nix_unstable | util-linux-minimal | 2.38.1  | NOT_FOUND |       | 2.38.1       |
```

Above, we can see that the package 'util-linux-minimal', which is one of the components in the example sbom 'wget.runtime.sbom.cdx.json', is not available (with that exact same name) in repology.org.

### Repology CVE search
Following query shows an example of using the [`repology_cve`](../src/repology/repology_cve.py) client to query CVEs known to repology.org that impact package `openssl` version `3.1.1`.

```bash
$ repology_cve openssl 3.1.1

INFO     Repology affected CVE(s)

| package   | version   | cve           |
|-----------+-----------+---------------|
| openssl   | 3.1.1     | CVE-2023-2975 |
| openssl   | 3.1.1     | CVE-2023-3446 |
| openssl   | 3.1.1     | CVE-2023-3817 |
| openssl   | 3.1.1     | CVE-2023-4807 |
| openssl   | 3.1.1     | CVE-2023-5363 |
| openssl   | 3.1.1     | CVE-2023-5678 |

INFO     Wrote: repology_cves.csv
```


================================================
FILE: doc/vulnxscan.md
================================================
<!--
SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)

SPDX-License-Identifier: CC-BY-SA-4.0
-->

# vulnxscan

[`vulnxscan`](../src/vulnxscan/vulnxscan_cli.py) is a command line utility that demonstrates running vulnerability scans using SBOM as input. It mainly targets nix packages, however, it can be used with any other targets too as long as the target is expressed as valid CycloneDX SBOM.

Table of Contents
=================
* [Getting Started](#getting-started)
* [Example Target](#example-target)
* [Supported Scanners](#supported-scanners)
   * [Nix and OSV Vulnerability Database](#nix-and-osv-vulnerability-database)
   * [Nix and Grype](#nix-and-grype)
   * [Vulnix](#vulnix)
* [Vulnxscan Usage Examples](#vulnxscan-usage-examples)
   * [Find Vulnerabilities Impacting Runtime Dependencies](#find-vulnerabilities-impacting-runtime-dependencies)
   * [Whitelisting Vulnerabilities](#whitelisting-vulnerabilities)
   * [Find Vulnerabilities Given SBOM as Input](#find-vulnerabilities-given-sbom-as-input)
   * [Find Vulnerabilities Impacting Buildtime and Runtime Dependencies](#find-vulnerabilities-impacting-buildtime-and-runtime-dependencies)
   * [Using Whitelist to Record Manual Analysis Results](#using-whitelist-to-record-manual-analysis-results)
   * [Triage to Help Manual Analysis](#triage-to-help-manual-analysis)
* [Footnotes and Future Work](#footnotes-and-future-work)

## Getting Started
To get started, follow the [Getting Started](../README.md#getting-started) section from the main [README](../README.md).

As an example, to run the `vulnxscan` from your local clone of the `tiiuae/sbomnix` repository:
```bash
# '--' signifies the end of argument list for `nix`.
# '--help' is the first argument to `vulnxscan`
$ nix run .#vulnxscan -- --help
```

## Example Target
In the below examples, we use `git` as an example target for `vulnxscan`, referred to by flakeref `github:NixOS/nixpkgs/nixos-unstable#git`.

## Supported Scanners
### Nix and OSV Vulnerability Database
[OSV](https://osv.dev/) is a vulnerability database for open-source projects [initiated by Google](https://security.googleblog.com/2021/02/launching-osv-better-vulnerability.html).

[OSV database](https://osv.dev/list?ecosystem=) currently [does not support Nix ecosystem](https://ossf.github.io/osv-schema/#affectedpackage-field), so queries that specify Nix as ecosystem would not return any matches. For this reason `vulnxscan` currently does not use Google's official [OSV-Scanner](https://security.googleblog.com/2022/12/announcing-osv-scanner-vulnerability.html) front-end, but implements its own OSV client demo in [osv.py](../src/vulnxscan/osv.py).

`osv.py` sends queries to [OSV API](https://osv.dev/docs/) without specifying the ecosystem, only the target package name and version. At the time of writing, such queries to OSV API return vulnerabilities that match the given package and version across all ecosystems. As a result, the OSV vulnerabilities for Nix ecosystem will include false positives.

Also, it is worth mentioning that OSV queries without ecosystem are undocumented in the [API specification](https://osv.dev/docs/#tag/api/operation/OSV_QueryAffected) currently.

### Nix and Grype
[Grype](https://github.com/anchore/grype) is a vulnerability scanner targeted for container images. It uses the vulnerability data from [a variety of publicly available data sources](https://github.com/anchore/grype#grypes-database). Grype also [supports input from CycloneDX SBOM](https://github.com/anchore/grype#supported-sources) which makes it possible to use Grype with SBOM input from `sbomnix`, thus, allowing Grype scans against Nix targets.

### Vulnix
[Vulnix](https://github.com/nix-community/vulnix) is a vulnerability scanner intended for Nix targets. It uses [NIST NVD](https://nvd.nist.gov/vuln) vulnerability database.

Vulnix matches vulnerabilities based on a [heuristic](https://github.com/nix-community/vulnix/blob/f56f3ac857626171b95e51d98cb6874278f789d3/src/vulnix/derivation.py#L104), which might result in more false positives compared to a direct match. False positives due to the rough heuristic are an [intended feature](https://github.com/nix-community/vulnix#whitelisting) in vulnix. On the other hand, vulnix accounts for [CVE patches](https://github.com/nix-community/vulnix#cve-patch-auto-detection) applied to Nix packages when matching vulnerabilities, something currently not directly supported by other scanners.

## Vulnxscan Usage Examples

### Find Vulnerabilities Impacting Runtime Dependencies
This example shows how to use `vulnxscan` to summarize vulnerabilities impacting the given target or any of its runtime dependencies. The captured output is illustrative; exact versions and findings will differ depending on the package versions resolved at run time.

```bash
# Target can be specified as a flakeref or a nix store path, e.g.:
# vulnxscan .
# vulnxscan github:tiiuae/sbomnix
# vulnxscan nixpkgs#git
# vulnxscan /nix/store/...
# Ref: https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#flake-references
$ vulnxscan github:NixOS/nixpkgs/nixos-unstable#git

INFO     Generating SBOM for target '/nix/store/...-git-<version>'
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     Console report

Potential vulnerabilities impacting version_local:

| vuln_id          | url                                               | package   | version | severity | grype | osv | vulnix | sum |
|------------------+---------------------------------------------------+-----------+---------+----------+-------+-----+--------+-----|
| CVE-2023-3817    | https://nvd.nist.gov/vuln/detail/CVE-2023-3817    | openssl   | 3.0.9   | 5.3      |   1   |  0  |   1    |  2  |
| CVE-2022-38663   | https://nvd.nist.gov/vuln/detail/CVE-2022-38663   | git       | 2.41.0  | 6.5      |   0   |  0  |   1    |  1  |
| CVE-2022-36884   | https://nvd.nist.gov/vuln/detail/CVE-2022-36884   | git       | 2.41.0  | 5.3      |   0   |  0  |   1    |  1  |
| CVE-2022-36883   | https://nvd.nist.gov/vuln/detail/CVE-2022-36883   | git       | 2.41.0  | 7.5      |   0   |  0  |   1    |  1  |
| CVE-2022-36882   | https://nvd.nist.gov/vuln/detail/CVE-2022-36882   | git       | 2.41.0  | 8.8      |   0   |  0  |   1    |  1  |
| CVE-2022-30949   | https://nvd.nist.gov/vuln/detail/CVE-2022-30949   | git       | 2.41.0  | 5.3      |   0   |  0  |   1    |  1  |
| CVE-2022-30948   | https://nvd.nist.gov/vuln/detail/CVE-2022-30948   | git       | 2.41.0  | 7.5      |   0   |  0  |   1    |  1  |
| CVE-2022-30947   | https://nvd.nist.gov/vuln/detail/CVE-2022-30947   | git       | 2.41.0  | 7.5      |   0   |  0  |   1    |  1  |
| MAL-2022-4301    | https://osv.dev/MAL-2022-4301                     | libidn2   | 2.3.4   |          |   0   |  1  |   0    |  1  |
| CVE-2021-21684   | https://nvd.nist.gov/vuln/detail/CVE-2021-21684   | git       | 2.41.0  | 6.1      |   0   |  0  |   1    |  1  |
| CVE-2020-2136    | https://nvd.nist.gov/vuln/detail/CVE-2020-2136    | git       | 2.41.0  | 5.4      |   0   |  0  |   1    |  1  |
| CVE-2019-1003010 | https://nvd.nist.gov/vuln/detail/CVE-2019-1003010 | git       | 2.41.0  | 4.3      |   0   |  0  |   1    |  1  |
| CVE-2018-1000182 | https://nvd.nist.gov/vuln/detail/CVE-2018-1000182 | git       | 2.41.0  | 6.4      |   0   |  0  |   1    |  1  |
| CVE-2018-1000110 | https://nvd.nist.gov/vuln/detail/CVE-2018-1000110 | git       | 2.41.0  | 5.3      |   0   |  0  |   1    |  1  |
| CVE-2016-2781    | https://nvd.nist.gov/vuln/detail/CVE-2016-2781    | coreutils | 9.3     | 6.5      |   1   |  0  |   0    |  1  |

INFO     Wrote: vulns.csv
```

`vulnxscan` first creates an SBOM, then feeds the SBOM (or target path) as input to different vulnerability scanners: [vulnix](https://github.com/nix-community/vulnix), [grype](https://github.com/anchore/grype), and [osv.py](../src/vulnxscan/osv.py) and creates a summary report. The summary report lists the newest vulnerabilities on top, with the `sum` column indicating how many scanners agreed with the exact same finding. In addition to the console output, `vulnxscan` writes the report to csv-file `vulns.csv` to allow easier post-processing of the output.

It is worth mentioning that `vulnxscan` filters out vulnerabilities that it detects are patched, as printed out in the console output on lines like '`CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']`'.
This patch auto-detection works in the same way as the [patch auto-detection in vulnix](https://github.com/nix-community/vulnix#cve-patch-auto-detection), that is, it is based on detecting vulnerability identifiers from the patch filenames.


### Whitelisting Vulnerabilities
`vulnxscan` supports whitelisting vulnerabilities to exclude false positives, unfixable issues, or vulnerabilities known to be addressed. Whitelist is a csv file that contains rules for the vulnerabilities to be excluded from the vulnxscan console report. Consider the following example whitelist:

```
$ csvlook whitelist.csv

| vuln_id        | package   | comment                                                                 |
| -------------- | --------- | ----------------------------------------------------------------------- |
| MAL-2022-4301  |           | Incorrect package: Issue refers npm libidn2, not libidn2.               |
| CVE-2016-2781  | coreutils | NVD data issue: CPE entry does not correctly state the version numbers. |
| CVE-20.*       | git       | Incorrect package: Impacts Jenkins git plugin, not git.                 |
```

`vuln_id` and `comment` are mandatory columns. `vuln_id` specifies a regular expression that will be used to match the vulnerability identification (`vuln_id`) against that of the `vulnxscan` output. Vulnerabilities that match the regular expression are excluded from the `vulnxscan` console output. If the whitelist includes a `package` column, in addition to matching `vuln_id`, a strict match is required against the `package` field in `vulnxscan` output.

In case many rules match a vulnerability, rules on top of the whitelist are given higher priority.

To be able to verify which vulnerabilities are whitelisted, `vulnxscan` csv output `vulns.csv` includes both whitelisted and non-whitelisted vulnerabilities implied with boolean column `whitelist`. `vulns.csv` also includes the `comment` section from the whitelist to be able to verify the reason for whitelisting each vulnerability. Below example shows applying the above example whitelist against the `git` vulnxscan output from the earlier example.

```bash
# Given the whitelist.csv contents:
$ cat whitelist.csv
"vuln_id","package","comment"
"MAL-2022-4301",,"Incorrect package: Issue refers npm libidn2, not libidn2."
"CVE-2016-2781","coreutils","NVD data issue: CPE entry does not correctly state the version numbers."
"CVE-20.* ","git","Incorrect package: Impacts Jenkins git plugin, not git."

# Apply the whitelist to git vulnxscan output
$ vulnxscan github:NixOS/nixpkgs/nixos-unstable#git --whitelist=whitelist.csv

INFO     Generating SBOM for target '/nix/store/...-git-<version>'
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     Console report

Potential vulnerabilities impacting version_local:

# Note: the console output now includes only non-whitelisted entries:

| vuln_id       | url                                            | package   | version | severity | grype | osv | vulnix | sum |
|---------------+------------------------------------------------+-----------+---------+----------+-------+-----+--------+-----|
| CVE-2023-3817 | https://nvd.nist.gov/vuln/detail/CVE-2023-3817 | openssl   | 3.0.9   |   5.3    |   1   |  0  |   1    |  2  |

INFO     Wrote: vulns.csv

# In addition to the console report, vulnxscan writes a detailed report in a csv file,
# by default 'vulns.csv', which includes the full details also from the whitelisted vulnerabilities:
$ csvlook vulns.csv

| vuln_id          | url                                               | package   | version | severity | grype |   osv | vulnix | sum | sortcol         | whitelist | whitelist_comment                                                       |
| ---------------- | ------------------------------------------------- | --------- | ------- | -------- | ----- | ----- | ------ | --- | --------------- | --------- | ----------------------------------------------------------------------- |
| CVE-2023-3817    | https://nvd.nist.gov/vuln/detail/CVE-2023-3817    | openssl   | 3.0.9   |      5.3 |  True | False |   True |   2 | 2023A0000003817 |     False |                                                                         |
| CVE-2022-38663   | https://nvd.nist.gov/vuln/detail/CVE-2022-38663   | git       | 2.41.0  |      6.5 | False | False |   True |   1 | 2022A0000038663 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2022-36884   | https://nvd.nist.gov/vuln/detail/CVE-2022-36884   | git       | 2.41.0  |      5.3 | False | False |   True |   1 | 2022A0000036884 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2022-36883   | https://nvd.nist.gov/vuln/detail/CVE-2022-36883   | git       | 2.41.0  |      7.5 | False | False |   True |   1 | 2022A0000036883 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2022-36882   | https://nvd.nist.gov/vuln/detail/CVE-2022-36882   | git       | 2.41.0  |      8.8 | False | False |   True |   1 | 2022A0000036882 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2022-30949   | https://nvd.nist.gov/vuln/detail/CVE-2022-30949   | git       | 2.41.0  |      5.3 | False | False |   True |   1 | 2022A0000030949 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2022-30948   | https://nvd.nist.gov/vuln/detail/CVE-2022-30948   | git       | 2.41.0  |      7.5 | False | False |   True |   1 | 2022A0000030948 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2022-30947   | https://nvd.nist.gov/vuln/detail/CVE-2022-30947   | git       | 2.41.0  |      7.5 | False | False |   True |   1 | 2022A0000030947 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| MAL-2022-4301    | https://osv.dev/MAL-2022-4301                     | libidn2   | 2.3.4   |          | False |  True |  False |   1 | 2022A0000004301 |      True | Incorrect package: Issue refers npm libidn2, not libidn2.               |
| CVE-2021-21684   | https://nvd.nist.gov/vuln/detail/CVE-2021-21684   | git       | 2.41.0  |      6.1 | False | False |   True |   1 | 2021A0000021684 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2020-2136    | https://nvd.nist.gov/vuln/detail/CVE-2020-2136    | git       | 2.41.0  |      5.4 | False | False |   True |   1 | 2020A0000002136 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2019-1003010 | https://nvd.nist.gov/vuln/detail/CVE-2019-1003010 | git       | 2.41.0  |      4.3 | False | False |   True |   1 | 2019A0001003010 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2018-1000182 | https://nvd.nist.gov/vuln/detail/CVE-2018-1000182 | git       | 2.41.0  |      6.4 | False | False |   True |   1 | 2018A0001000182 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2018-1000110 | https://nvd.nist.gov/vuln/detail/CVE-2018-1000110 | git       | 2.41.0  |      5.3 | False | False |   True |   1 | 2018A0001000110 |      True | Incorrect package: Impacts Jenkins git plugin, not git.                 |
| CVE-2016-2781    | https://nvd.nist.gov/vuln/detail/CVE-2016-2781    | coreutils | 9.3     |      6.5 |  True | False |  False |   1 | 2016A0000002781 |      True | NVD data issue: CPE entry does not correctly state the version numbers. |
```

See ghafscan [manual_analysis.csv](https://github.com/tiiuae/ghafscan/blob/main/manual_analysis.csv) for a more complete example and usage of the vulnxscan whitelisting feature.

### Find Vulnerabilities Given SBOM as Input
This example shows how to use `vulnxscan` to summarize vulnerabilities impacting components in the given CycloneDX SBOM.

First, we use `sbomnix` to generate SBOM for the example target:
```bash
$ nix run .#sbomnix -- github:NixOS/nixpkgs/nixos-unstable#git
..
INFO     Wrote: sbom.cdx.json
```

Then, give the generated SBOM as input to `vulnxscan`:
```bash
$ vulnxscan --sbom sbom.cdx.json

INFO     Console report

Potential vulnerabilities impacting version_local:

| vuln_id       | url                                            | package   | version | severity | grype | osv | sum |
|---------------+------------------------------------------------+-----------+---------+----------+-------+-----+-----|
| CVE-2023-3817 | https://nvd.nist.gov/vuln/detail/CVE-2023-3817 | openssl   | 3.0.9   | 5.3      |   1   |  0  |  1  |
| CVE-2023-2975 | https://nvd.nist.gov/vuln/detail/CVE-2023-2975 | openssl   | 3.0.9   | 5.3      |   1   |  0  |  1  |
| MAL-2022-4301 | https://osv.dev/MAL-2022-4301                  | libidn2   | 2.3.4   |          |   0   |  1  |  1  |
| CVE-2016-2781 | https://nvd.nist.gov/vuln/detail/CVE-2016-2781 | coreutils | 9.3     | 6.5      |   1   |  0  |  1  |

INFO     Wrote: vulns.csv
```
Notice that `vulnxscan` skips the vulnix scan when the input is an SBOM. This is because vulnix does not support SBOM input at the time of writing.

Also notice that `vulnxscan` drops the patch auto-detection if the input is SBOM. The reason is that `vulnxscan` reads the patch information from nix derivations. Therefore, the patch information is only available when the given input is a Nix store path (e.g. derivation or out-path), not SBOM.


### Find Vulnerabilities Impacting Buildtime and Runtime Dependencies
By default, `vulnxscan` scans the given target for vulnerabilities that impact its runtime-only dependencies. This example shows how to use `vulnxscan` to include also buildtime dependencies to the scan.

```bash
$ vulnxscan ./result --buildtime

# ... output not included in this snippet ...
```

### Using Whitelist to Record Manual Analysis Results
`vulnxscan` supports using whitelist csv file as a more generic record of manual analysis results, by allowing non-whitelisting rules. That is, the whitelist csv file can include a boolean `whitelist` column to indicate if the matching vulnerabilities should be whitelisted or not. The default value for `whitelist` is True, that is, if the `whitelist` column is missing or the value is empty, `vulnxscan` interprets the rule as if the `whitelist` column value would evaluate to True.

As an example, consider the following manual analysis record (i.e. 'whitelist'):

```
csvlook manual_analysis.csv

| vuln_id        | whitelist | package   | comment                                                            |
| -------------- | --------- | --------- | ------------------------------------------------------------------ |
| CVE-2022-0856  |     False | libcaca   | Not fixed upstream: https://github.com/cacalabs/libcaca/issues/65. |
| CVE-2021-32490 |     False | djvulibre | Pending merge: https://github.com/NixOS/nixpkgs/pull/246773.       |
```

The above example `manual_analysis.csv` includes two rules: one for `CVE-2022-0856` and one for `CVE-2021-32490`. For both, the `whitelist` column value is '`False`', indicating the rule is a non-whitelisting rule. This means, for both cases, we want to record the manual analysis results as detailed in the `comment` column, but we don't want to whitelist the matching vulnerabilities. Specifically, in the case of `CVE-2022-0856` we don't want to whitelist the issue since it's not fixed upstream, but we still want to record the link to the upstream issue to make it easier to follow the upstream progress. In the case of `CVE-2021-32490` we don't want to whitelist the issue since the nixpkgs PR is pending merge. In this case too, we still want to record the nixpkgs PR to allow following the progress.

See ghafscan [manual_analysis.csv](https://github.com/tiiuae/ghafscan/blob/main/manual_analysis.csv) for a more complete example and usage of non-whitelisting rules to help manual analysis.

### Triage to Help Manual Analysis
`vulnxscan` can be used to help manual analysis with `--triage` and `--nixprs` command line options.

With command line option `--triage`, `vulnxscan` queries repology.org for nix-unstable and package upstream version information, as well as the CVE impacted versions. With the additional information from repology.org, `vulnxscan` classifies each vulnerability accordingly.

Consider the following example, using [ghaf](https://github.com/tiiuae/ghaf) as target:

```bash
# Run vulnxscan:
#  --buildtime: Scan buildtime dependencies. Scanning buildtime dependencies does not
#               require building the target, which allows relatively quick scan also for
#               targets not built earlier. Notice: nix 'buildtime' dependencies are a
#               superset of runtime dependencies.
#  --whitelist: Use 'manual_analysis.csv' as a whitelist file.
#  --triage   : Help manual analysis by querying version info from repology.org.
$ vulnxscan github:tiiuae/ghaf?ref=main#packages.x86_64-linux.generic-x86_64-release --buildtime --whitelist=manual_analysis.csv --triage
INFO     Generating SBOM for target '/nix/store/...-nixos-disk-image.drv'
INFO     CVE-2023-27371 for 'libmicrohttpd' is patched with: ['/nix/store/l53sq07v6hghm7cchcjbrwyvjyjag06r-CVE-2023-27371.patch']
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     CVE-2023-2975 for 'openssl' is patched with: ['/nix/store/7gz0nj14469r9dlh8p0j5w5wjj3b6hw4-CVE-2023-2975.patch']
INFO     CVE-2023-2617 for 'opencv' is patched with: ['/nix/store/vw29nr5nrfs10vv5p3m7rpkqscwrh4sp-CVE-2023-2617.patch']
...

Potential vulnerabilities impacting version_local:

| vuln_id             | package    | severity | version_local | version_nixpkgs | version_upstream | classify                             |
|---------------------+------------+----------+---------------+-----------------+------------------+--------------------------------------|
| CVE-2023-40360      | qemu       | 5.5      | 8.0.2         | 8.1.0           | 8.1.0            | fix_update_to_version_nixpkgs        |
| CVE-2023-40359      | xterm      | 9.8      | 379           | 384             | 384              | fix_update_to_version_nixpkgs        |
| CVE-2023-39742      | giflib     | 5.5      | 5.2.1         | 5.2.1           | 5.2.1            | fix_not_available                    |
| CVE-2023-39533      | go         | 7.5      | 1.20.6        | 1.21.1          | 1.21.1           | fix_update_to_version_nixpkgs        |
| CVE-2023-38858      | faad2      | 6.5      | 2.10.1        | 2.10.1          | 2.10.1           | fix_not_available                    |
| CVE-2023-38857      | faad2      | 5.5      | 2.10.1        | 2.10.1          | 2.10.1           | fix_not_available                    |
| CVE-2023-38633      | librsvg    | 5.5      | 2.55.1        | 2.56.3          | 2.56.3           | fix_update_to_version_nixpkgs        |
| CVE-2023-37769      | pixman     | 6.5      | 0.42.2        | 0.42.2          | 0.42.2           | err_not_vulnerable_based_on_repology |
| CVE-2023-31484      | perl       | 8.1      | 5.36.0-env    | 5.38.0          | 5.38.0           | fix_update_to_version_nixpkgs        |
| CVE-2023-31484      | perl       | 8.1      | 5.36.0        | 5.38.0          | 5.38.0           | fix_update_to_version_nixpkgs        |
| CVE-2023-30571      | libarchive | 5.3      | 3.6.2         | 3.6.2           | 3.7.1            | fix_update_to_version_upstream       |
| CVE-2023-29409      | go         | 5.3      | 1.20.6        | 1.21.1          | 1.21.1           | fix_update_to_version_nixpkgs        |
| CVE-2023-29383      | shadow     | 3.3      | 4.13          | 4.13            | 4.14.0           | fix_update_to_version_upstream       |

... (output truncated) ...

INFO     Wrote: /home/hrosten/projects/sbomnix-fork/vulns.csv
INFO     Wrote: /home/hrosten/projects/sbomnix-fork/vulns.triage.csv
```

As an example, the output table states the following:
- Package `qemu` 8.0.2, which is a dependency to ghaf, is potentially vulnerable to CVE-2023-40360.
- Based on repology.org, the newest `qemu` version in nix-unstable is 8.1.0. Also based on repology.org, the latest `qemu` version in the `qemu` upstream is 8.1.0.
- Since `qemu` 8.0.2 is vulnerable to CVE-2023-40360, but the nix-unstable version 8.1.0 is not, `vulnxscan` classifies the issue as `fix_update_to_version_nixpkgs`.
- Package `xterm` version 379 is potentially vulnerable to CVE-2023-40359. Latest version of `xterm` in nix-unstable is 384, which is not vulnerable to CVE-2023-40359. Therefore, `vulnxscan` classifies the issue as `fix_update_to_version_nixpkgs`.
- Package `giflib` version 5.2.1 is potentially vulnerable to CVE-2023-39742. Since there's no known fixed version available in nix-unstable or the package upstream, `vulnxscan` classifies the issue as `fix_not_available`. Notice that the classification is based only on the version numbers. Indeed, it's still possible that there's an upstream patch available in an unreleased version of `giflib` that would fix the issue.
- Package `pixman` version 0.42.2 is potentially vulnerable to CVE-2023-37769. However, based on repology.org, the vulnerability [does not impact](https://repology.org/project/pixman/cves?version=0.42.2) the given version of `pixman`. Therefore, `vulnxscan` classifies the issue as `err_not_vulnerable_based_on_repology`.

##### Nixpkgs PR Search

With command line option `--nixprs`, `vulnxscan` queries github for nixpkgs PRs that might include more information concerning possible nixpkgs fixes for the found vulnerabilities. `--nixprs` adds URLs to (at most five) PRs that appear valid for each vulnerability based on heuristic. The PR search takes significant time due to github API rate limits, which is why it is not enabled by default.

Consider the following example, using the same Ghaf target as earlier:

```bash
# Run vulnxscan with --triage and --nixprs
$ vulnxscan github:tiiuae/ghaf?ref=main#packages.x86_64-linux.generic-x86_64-release --buildtime --whitelist=manual_analysis.csv --triage --nixprs
INFO     Generating SBOM for target '/nix/store/...-nixos-disk-image.drv'
...
Potential vulnerabilities impacting version_local:


| vuln_id        | package    | severity   | version_local | version_nixpkgs | version_upstream | classify                      | nixpkgs_pr                                    |
|----------------+------------+------------+---------------+-----------------+------------------+-------------------------------+-----------------------------------------------|
| CVE-2023-40360 | qemu       | 5.5        | 8.0.2         | 8.1.0           | 8.1.0            | fix_update_to_version_nixpkgs | https://github.com/NixOS/nixpkgs/pull/251154  |
| CVE-2023-40359 | xterm      | 9.8        | 379           | 384             | 384              | fix_update_to_version_nixpkgs | https://github.com/NixOS/nixpkgs/pull/244141  |
| CVE-2023-39742 | giflib     | 5.5        | 5.2.1         | 5.2.1           | 5.2.1            | fix_not_available             |                                               |
| CVE-2023-39533 | go         | 7.5        | 1.20.6        | 1.21.1          | 1.21.1           | fix_update_to_version_nixpkgs | https://github.com/NixOS/nixpkgs/pull/253738  |
| CVE-2023-38858 | faad2      | 6.5        | 2.10.1        | 2.10.1          | 2.10.1           | fix_not_available             |                                               |
| CVE-2023-38857 | faad2      | 5.5        | 2.10.1        | 2.10.1          | 2.10.1           | fix_not_available             |                                               |
| CVE-2023-38633 | librsvg    | 5.5        | 2.55.1        | 2.56.3          | 2.56.3           | fix_update_to_version_nixpkgs | https://github.com/NixOS/nixpkgs/pull/246763  |
|                |            |            |               |                 |                  |                               | https://github.com/NixOS/nixpkgs/pull/246860  |
| CVE-2023-37769 | pixman     | 6.5        | 0.42.2        | 0.42.2          | 0.42.2           | err_not_vulnerable_based_on_re|                                               |
| CVE-2023-31484 | perl       | 8.1        | 5.36.0-env    | 5.38.0          | 5.38.0           | fix_update_to_version_nixpkgs | https://github.com/NixOS/nixpkgs/pull/241848  |
|                |            |            |               |                 |                  |                               | https://github.com/NixOS/nixpkgs/pull/247547  |
| CVE-2023-31484 | perl       | 8.1        | 5.36.0        | 5.38.0          | 5.38.0           | fix_update_to_version_nixpkgs | https://github.com/NixOS/nixpkgs/pull/241848  |
|                |            |            |               |                 |                  |                               | https://github.com/NixOS/nixpkgs/pull/247547  |
| CVE-2023-30571 | libarchive | 5.3        | 3.6.2         | 3.6.2           | 3.7.1            | fix_update_to_version_upstream|                                               |
| CVE-2023-29409 | go         | 5.3        | 1.20.6        | 1.21.1          | 1.21.1           | fix_update_to_version_nixpkgs | https://github.com/NixOS/nixpkgs/pull/247034  |
|                |            |            |               |                 |                  |                               | https://github.com/NixOS/nixpkgs/pull/253738  |
| CVE-2023-29383 | shadow     | 3.3        | 4.13          | 4.13            | 4.14.0           | fix_update_to_version_upstream| https://github.com/NixOS/nixpkgs/pull/233924  |
|                |            |            |               |                 |                  |                               | https://github.com/NixOS/nixpkgs/pull/254143  |
```

`vulnxscan` option `--nixprs` adds the column `nixpkgs_pr` to the output, to help manual analysis by listing PRs that appear relevant for the given issue.

## Footnotes and Future Work

For now, consider `vulnxscan` as a demonstration. Some improvement ideas are listed below:
 - Consider adding patch information to SBOM (e.g. via the [pedigree](https://cyclonedx.org/use-cases/#pedigree) attribute) to be able to auto-detect patched vulnerabilities also when the input is SBOM.
 - Vulnerability scanners lack support for parsing the patch data: even if `sbomnix` added the patch data to the output SBOM, we suspect not many vulnerability scanners would read the information. As an example, the following discussion touches this topic on DependencyTrack: https://github.com/DependencyTrack/dependency-track/issues/919.
 - Identifying packages is hard as pointed out in https://discourse.nixos.org/t/the-future-of-the-vulnerability-roundups/22424/5. As an example, CPEs are inaccurate which causes issues in matching vulnerabilities: https://github.com/DependencyTrack/dependency-track/discussions/2290.
 - Nix ecosystem is not supported in OSV: the way `osv.py` makes use of OSV data for Nix targets -- as explained in section [Nix and OSV vulnerability database](#nix-and-osv-vulnerability-database) -- makes the reported OSV vulnerabilities include false positives.

### Other Future Work
- [vulnxscan](../src/vulnxscan/vulnxscan_cli.py) could include more scanners in addition to [vulnix](https://github.com/nix-community/vulnix), [grype](https://github.com/anchore/grype), and [osv.py](../src/vulnxscan/osv.py). Suggestions for other open-source scanners, especially those that can digest CycloneDX or SPDX SBOMs are welcome. Consider e.g. [bombon](https://github.com/nikstur/bombon) and [cve-bin-tool](https://github.com/intel/cve-bin-tool). Adding cve-bin-tool to vulnxscan was [demonstrated](https://github.com/tiiuae/sbomnix/pull/75) earlier, but not merged due to reasons explained in the [PR](https://github.com/tiiuae/sbomnix/pull/75#issuecomment-1670958503).


================================================
FILE: flake.nix
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0
{
  description = "Flakes file for sbomnix";

  inputs = {
    # Track the rolling nixos-unstable channel.
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    # flake-parts supplies the perSystem module structure imported from ./nix.
    flake-parts.url = "github:hercules-ci/flake-parts";
    flake-root.url = "github:srid/flake-root";
    # For preserving compatibility with non-Flake users
    flake-compat = {
      url = "github:nix-community/flake-compat";
      flake = false;
    };
    # pre-commit hooks
    git-hooks-nix = {
      url = "github:cachix/git-hooks.nix";
      inputs = {
        # Reuse this flake's inputs to avoid pulling duplicate copies.
        nixpkgs.follows = "nixpkgs";
        flake-compat.follows = "flake-compat";
      };
    };
  };

  outputs =
    inputs@{ flake-parts, ... }:
    flake-parts.lib.mkFlake
      {
        inherit inputs;
      }
      {
        # Platforms the flake outputs are evaluated for.
        systems = [
          "x86_64-linux"
          "aarch64-linux"
          "aarch64-darwin"
        ];

        # All concrete outputs (apps, packages, formatter, hooks) are defined
        # by the flake-parts modules under ./nix.
        imports = [
          ./nix
        ];
      };
}


================================================
FILE: nix/apps.nix
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0
{
  perSystem =
    { self', ... }:
    {
      # Expose every CLI shipped in the sbomnix package as `nix run .#<name>`.
      # Each app simply points at the matching wrapper script in the package's
      # bin/ directory; the table below maps app name to its description.
      apps =
        let
          inherit (self'.packages) sbomnix;
          descriptions = {
            repology_cli = "Query Repology using an SBOM as input";
            repology_cve = "Find CVEs for packages known to Repology";
            nix_outdated = "List outdated nix dependencies in priority order";
            nixgraph = "Visualize nix package dependencies";
            nixmeta = "Summarize nixpkgs meta-attributes";
            vulnxscan = "Scan nix artifacts or SBOMs for vulnerabilities";
            provenance = "Generate SLSA provenance for a nix target";
          };
        in
        builtins.mapAttrs (name: description: {
          type = "app";
          program = "${sbomnix}/bin/${name}";
          meta = {
            inherit description;
          };
        }) descriptions;
    };
}


================================================
FILE: nix/default.nix
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0
{
  # Aggregate the flake-parts modules that define this flake's outputs.
  imports = [
    ./apps.nix # `nix run` entry points for the bundled CLIs
    ./formatter.nix # `nix fmt` wrapper around pre-commit
    ./packages.nix # the sbomnix python package and devshell
    ./git-hooks.nix # pre-commit hook configuration
  ];
}


================================================
FILE: nix/formatter.nix
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0
{ ... }:
{
  perSystem =
    { config, pkgs, ... }:
    {
      # `nix fmt` delegates to pre-commit so formatting and linting use the
      # exact same hook configuration as the git hooks themselves.
      formatter = pkgs.writeShellScriptBin "pre-commit-run" ''
        exec ${pkgs.lib.getExe config.pre-commit.settings.package} run --all-files --config ${config.pre-commit.settings.configFile}
      '';
    };
}


================================================
FILE: nix/git-hooks.nix
================================================
# SPDX-FileCopyrightText: 2025-2026 TII (SSRC) and the Ghaf contributors
# SPDX-License-Identifier: Apache-2.0
{ inputs, ... }:
{
  imports = with inputs; [
    git-hooks-nix.flakeModule
  ];
  perSystem =
    { pkgs, ... }:
    let
      # Paths skipped by the text-level hooks: vendored license texts and
      # test resource fixtures.
      commonExcludes = [
        "^LICENSES/.*"
        "^tests/resources/.*"
      ];
      # Python environment with the project's third-party dependencies so
      # pyright can resolve imports when run as a pre-commit hook.
      pyrightPythonEnv = pkgs.python3.withPackages (
        pp: with pp; [
          beautifulsoup4
          colorlog
          dfdiskcache
          filelock
          graphviz
          pp."license-expression"
          numpy
          packageurl-python
          packaging
          pandas
          pyrate-limiter
          reuse
          requests
          requests-cache
          requests-ratelimiter
          tabulate
        ]
      );
      # Wrapper binding pyright to the environment above.
      pyrightWrapper = pkgs.writeShellScriptBin "pyright-sbomnix" ''
        exec ${pkgs.lib.getExe pkgs.pyright} --pythonpath ${pyrightPythonEnv}/bin/python "$@"
      '';
    in
    {
      pre-commit = {
        settings.hooks = {
          gitlint.enable = true;
          typos = {
            enable = true;
            excludes = commonExcludes;
          };
          end-of-file-fixer = {
            enable = true;
            excludes = commonExcludes;
          };
          trim-trailing-whitespace = {
            enable = true;
            excludes = commonExcludes;
          };
          actionlint.enable = true;
          deadnix.enable = true;
          nixfmt.enable = true;
          pyright = {
            enable = true;
            pass_filenames = false;
            settings.binPath = "${pyrightWrapper}/bin/pyright-sbomnix";
          };
          ruff.enable = true;
          ruff-format.enable = true;
          reuse.enable = true;
          shellcheck.enable = true;
          statix = {
            enable = true;
            args = [
              "fix"
            ];
          };
        };
      };
    };
}


================================================
FILE: nix/packages.nix
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0
{ self, ... }:
{
  perSystem =
    {
      pkgs,
      lib,
      config,
      self',
      ...
    }:
    let
      # Shorthand for the python3 package set.
      pp = pkgs.python3.pkgs;
      # Base version read from the VERSION file, without its trailing newline.
      baseVersion = pkgs.lib.removeSuffix "\n" (builtins.readFile ../VERSION);
      # Append git state so local builds are distinguishable from release
      # artifacts. shortRev is set on a clean tree; dirtyShortRev (Nix >= 2.14)
      # is set when the working tree has uncommitted changes.
      gitSuffix =
        if self ? shortRev then
          "+g${self.shortRev}"
        else if self ? dirtyShortRev then
          "+g${self.dirtyShortRev}"
        else
          "";
      # Thin wrapper that calls a module entry point via the ambient python3.
      # PYTHONPATH (set in shellHook) resolves to the local src/, so edits are
      # picked up without reinstalling.
      mkDevEntry =
        name: module:
        pkgs.writeShellScriptBin name ''
          exec python3 -c "import sys; sys.argv[0]='${name}'; from ${module} import main; main()" "$@"
        '';
      # External tools put on PATH both for the wrapped package and devshell.
      prefix_path = with pkgs; [
        git
        graphviz
        grype
        nix
        nix-visualize
        vulnix
      ];
      # Test-only python dependencies.
      check_inputs = with pp; [
        hypothesis
        jsonschema
        pytest
        pytest-cov
        pytest-xdist
      ];
      build_system = with pp; [ setuptools ];
      # Runtime python dependencies (keep in sync with [project.dependencies]
      # in pyproject.toml).
      build_inputs = with pp; [
        beautifulsoup4
        colorlog
        dfdiskcache
        filelock
        graphviz
        pp."license-expression"
        numpy
        packageurl-python
        packaging
        pandas
        pyrate-limiter
        reuse
        requests
        requests-cache
        requests-ratelimiter
        tabulate
      ];
    in
    {
      packages = rec {
        default = sbomnix;
        sbomnix = pp.buildPythonPackage {
          pname = "sbomnix";
          version = "${baseVersion}${gitSuffix}";
          pyproject = true;
          src = lib.cleanSource ../.;
          # Rewrite VERSION so the installed package reports the git-suffixed
          # version, matching the derivation's version attribute.
          postPatch = ''
            printf '%s' "${baseVersion}${gitSuffix}" > VERSION
          '';
          build-system = build_system;
          nativeCheckInputs = check_inputs;
          dependencies = build_inputs;
          pythonImportsCheck = [ "sbomnix" ];
          makeWrapperArgs = [
            "--prefix PATH : ${lib.makeBinPath prefix_path}"
          ];
        };
      };
      checks =
        # Force a build of all packages during a `nix flake check`.
        with lib; mapAttrs' (n: nameValuePair "package-${n}") self'.packages;
      devShells.default = pkgs.mkShell {
        name = "sbomnix-devshell";
        packages = [
          pkgs.pyright # for running pyright manually in devshell
          pkgs.ruff # for running ruff manually in devshell
        ]
        ++ check_inputs
        ++ build_system
        ++ build_inputs
        ++ [
          (mkDevEntry "sbomnix" "sbomnix.main")
          (mkDevEntry "nixgraph" "nixgraph.main")
          (mkDevEntry "nixmeta" "nixmeta.main")
          (mkDevEntry "nix_outdated" "nixupdate.nix_outdated")
          (mkDevEntry "vulnxscan" "vulnxscan.vulnxscan_cli")
          (mkDevEntry "repology_cli" "repology.repology_cli")
          (mkDevEntry "repology_cve" "repology.repology_cve")
          (mkDevEntry "provenance" "provenance.main")
        ];
        # Add the repo's src/ directory to PYTHONPATH, so invoking entrypoints
        # (and them being able to find the python packages in the repo) becomes
        # possible. `pytest.ini` already sets this for invoking `pytest`
        # (cascading down to the processes it spawns), but this is for the
        # developer invoking entrypoints from inside the devshell.
        shellHook = ''
          ${config.pre-commit.installationScript}
          echo 1>&2 "Welcome to the development shell!"
          export PATH=${lib.makeBinPath prefix_path}:$PATH
          export PYTHONPATH="$PYTHONPATH:$(pwd)/src"
          # https://github.com/NixOS/nix/issues/1009:
          export TMPDIR="/tmp"
        '';
      };
    };
}


================================================
FILE: pyproject.toml
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
# SPDX-License-Identifier: Apache-2.0

[build-system]
requires = ["setuptools>=61"]
build-backend = "setuptools.build_meta"

[project]
name = "sbomnix"
dynamic = ["version"]
description = "Utility that generates SBOMs from nix packages"
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.10"
license = { text = "Apache-2.0" }
authors = [{ name = "TII", email = "henri.rosten@unikie.com" }]
classifiers = [
  "Development Status :: 3 - Alpha",
  "License :: OSI Approved :: Apache Software License",
  "Operating System :: POSIX :: Linux",
  "Programming Language :: Python :: 3 :: Only",
]
dependencies = [
  "beautifulsoup4",
  "colorlog",
  "df-diskcache",
  "filelock",
  "graphviz",
  "license-expression",
  "numpy",
  "packageurl-python",
  "packaging",
  "pandas",
  "reuse",
  "requests",
  "requests-cache",
  "requests-ratelimiter",
  "tabulate",
]

[project.urls]
Homepage = "https://github.com/tiiuae/sbomnix"

[project.scripts]
sbomnix = "sbomnix.main:main"
nixgraph = "nixgraph.main:main"
nixmeta = "nixmeta.main:main"
nix_outdated = "nixupdate.nix_outdated:main"
vulnxscan = "vulnxscan.vulnxscan_cli:main"
repology_cli = "repology.repology_cli:main"
repology_cve = "repology.repology_cve:main"
provenance = "provenance.main:main"

[tool.setuptools]
license-files = ["LICENSES/Apache-2.0.txt", "LICENSES/BSD-3-Clause.txt"]

[tool.setuptools.dynamic]
version = { file = ["VERSION"] }

[tool.setuptools.packages.find]
where = ["src"]

[tool.ruff]
line-length = 88
target-version = "py310"

[tool.ruff.lint]
preview = true
select = [
  "B",
  "E4",
  "E7",
  "E9",
  "F",
  "I",
  "PLE",
  "PLW",
  "PLR0911",
  "PLR0912",
  "PLR0913",
  "PLR0914",
  "PLR0915",
  "PLR0917",
  "RUF100",
]

[tool.ruff.lint.isort]
known-first-party = [
  "common",
  "nixgraph",
  "nixmeta",
  "nixupdate",
  "provenance",
  "repology",
  "sbomnix",
  "vulnxscan",
]


================================================
FILE: pyrightconfig.json
================================================
{
    "include": ["src"],
    "extraPaths": ["src"],
    "pythonVersion": "3.10",
    "typeCheckingMode": "standard",
    "reportMissingTypeStubs": false
}


================================================
FILE: pytest.ini
================================================
# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

[pytest]
pythonpath = .
addopts = --strict-markers
markers =
    integration: indicates a CLI or cross-module integration test.
    network: indicates a test that relies on external network access.
    slow: indicates a slow test.
    grype: indicates a test that invokes grype (triggers grype DB pre-warm).
    real_vulnix: opt-in tests that execute the real vulnix binary.


================================================
FILE: scripts/check-fast.sh
================================================
#!/usr/bin/env bash

# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

set -euo pipefail

# Fast local gate:
#  1. format the tree via the flake formatter,
#  2. evaluate flake checks without building derivations,
#  3. run the 'fast' pytest lane (excludes slow- and network-marked tests).
nix fmt
nix --extra-experimental-features 'flakes nix-command' flake check --no-build
nix develop --command ./scripts/run-pytest-lane.sh fast


================================================
FILE: scripts/check-full.sh
================================================
#!/usr/bin/env bash

# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

set -euo pipefail

# Full gate: evaluate and build all flake checks, then run the complete
# pytest suite ('full' lane, with coverage reporting).
nix --extra-experimental-features 'flakes nix-command' flake check
nix develop --command ./scripts/run-pytest-lane.sh full


================================================
FILE: scripts/release-asset.sh
================================================
#!/usr/bin/env bash

# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

set -euo pipefail

mkdir -p build/

release_target=".#sbomnix"

# Emit one SBOM trio (CycloneDX, SPDX, CSV) into build/.
# $1 is the variant label used in the output file names; any remaining
# arguments (e.g. --buildtime) are forwarded to sbomnix before the target.
generate_sboms() {
  variant="$1"
  shift
  nix run .#sbomnix -- "$@" "$release_target" \
    --cdx="./build/sbom.${variant}.cdx.json" \
    --spdx="./build/sbom.${variant}.spdx.json" \
    --csv="./build/sbom.${variant}.csv"
}

generate_sboms runtime
generate_sboms buildtime --buildtime

echo
echo "Built release asset:"
ls -la build


================================================
FILE: scripts/run-pytest-lane.sh
================================================
#!/usr/bin/env bash

# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

set -euo pipefail

# Print the accepted lanes and exit with the conventional usage status.
usage() {
  echo "usage: $0 {fast|full}" >&2
  exit 2
}

lane="${1:-}"
marker_expr=""
# Arguments shared by every lane: parallel workers, stop on first failure.
pytest_args=(
  -n auto
  -x
)

case "$lane" in
  fast)
    # Fast lane: skip tests marked slow or network-dependent.
    marker_expr="not slow and not network"
    pytest_args+=(-v --durations=10)
    ;;
  full)
    # Full lane: everything runs, with coverage collection and reports.
    pytest_args+=(-v --durations=20)
    pytest_args+=(
      --cov=src
      --cov-report=term-missing
      --cov-report=xml
    )
    ;;
  *)
    usage
    ;;
esac

if [ -n "$marker_expr" ]; then
  pytest_args+=(-m "$marker_expr")
fi

pytest "${pytest_args[@]}" tests/


================================================
FILE: shell.nix
================================================
# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)
# SPDX-FileCopyrightText: 2020-2023 Eelco Dolstra and the flake-compat contributors
#
# SPDX-License-Identifier: MIT
# This file originates from:
# https://github.com/nix-community/flake-compat
# This file provides backward compatibility to nix < 2.4 clients
{
  system ? builtins.currentSystem,
}:
let
  # Pin flake-compat to the exact revision recorded in flake.lock so non-flake
  # `nix-shell` users see the same inputs as flake users.
  lock = builtins.fromJSON (builtins.readFile ./flake.lock);

  inherit (lock.nodes.flake-compat.locked)
    owner
    repo
    rev
    narHash
    ;

  # Fetch flake-compat itself, verified against the locked hash.
  flake-compat = fetchTarball {
    url = "https://github.com/${owner}/${repo}/archive/${rev}.tar.gz";
    sha256 = narHash;
  };

  # Evaluate this repository's flake through the compatibility layer.
  flake = import flake-compat {
    inherit system;
    src = ./.;
  };
in
# Expose the flake's devShell to legacy `nix-shell` invocations.
flake.shellNix


================================================
FILE: src/common/__init__.py
================================================
# SPDX-FileCopyrightText: 2022 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0


================================================
FILE: src/common/cli_args.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Common argparse helper functions."""

import argparse
import sys
from weakref import WeakSet

from common.pkgmeta import get_py_pkg_version

_VERBOSE_COUNT_DEST = "_verbose_count"
_VERBOSE_WRAPPED_PARSERS = WeakSet()


class _VerboseCountAction(argparse.Action):
    """Count repeated short verbose flags without using parser defaults."""

    def __init__(self, option_strings, dest, nargs=0, **kwargs):
        # The flag never consumes a value; reject any other nargs early.
        if nargs != 0:
            raise ValueError("nargs must be 0")
        super().__init__(option_strings, dest, nargs=0, **kwargs)

    def __call__(self, _parser, namespace, _values, _option_string=None):
        # Track the running count on a private namespace attribute so a
        # later explicit --verbose can still override the destination.
        seen = getattr(namespace, _VERBOSE_COUNT_DEST, 0) + 1
        setattr(namespace, _VERBOSE_COUNT_DEST, seen)
        setattr(namespace, self.dest, seen)


def check_positive(val):
    """Argparse ``type=`` helper that parses val as a strictly positive integer.

    :param val: command-line string to parse
    :return: the parsed positive integer
    :raises argparse.ArgumentTypeError: if val is not an integer, or is <= 0

    Non-integer input previously escaped as a bare ValueError; converting it
    here yields the same user-facing message for all invalid inputs.
    """
    try:
        intval = int(val)
    except (TypeError, ValueError) as error:
        raise argparse.ArgumentTypeError(
            f"{val} is not a positive integer"
        ) from error
    if intval <= 0:
        raise argparse.ArgumentTypeError(f"{val} is not a positive integer")
    return intval


def _is_integer(value):
    """Return True if value can be parsed as an integer."""
    try:
        int(value)
    except ValueError:
        return False
    return True


def _normalize_verbose_args(args):
    """Normalize compact short verbose values before argparse sees positionals."""
    source = list(sys.argv[1:] if args is None else args)
    result = []
    i = 0
    total = len(source)
    while i < total:
        token = source[i]
        # "-v N" -> "--verbose=N" (consumes the following token)
        if token == "-v" and i + 1 < total and _is_integer(source[i + 1]):
            result.append(f"--verbose={source[i + 1]}")
            i += 2
            continue
        # "-vN" or "-v=N" -> "--verbose=N"
        if token.startswith("-v") and token != "-v":
            tail = token[2:]
            if tail.startswith("="):
                tail = tail[1:]
            if tail and _is_integer(tail):
                result.append(f"--verbose={tail}")
                i += 1
                continue
        result.append(token)
        i += 1
    return result


def _finalize_verbose_namespace(namespace):
    """Remove internal argparse bookkeeping from the parsed namespace."""
    try:
        delattr(namespace, _VERBOSE_COUNT_DEST)
    except AttributeError:
        # Attribute is absent when no verbose flag was registered; fine.
        pass
    return namespace


def _wrap_verbose_parser(parser):
    """Teach parse_known_args to normalize compact short verbose values."""
    # Idempotent: only wrap each parser once.
    if parser in _VERBOSE_WRAPPED_PARSERS:
        return

    original_parse_known_args = parser.parse_known_args

    def wrapped(args=None, namespace=None):
        # Rewrite compact "-vN"-style flags, then strip the bookkeeping
        # attribute from the resulting namespace.
        parsed, extras = original_parse_known_args(
            _normalize_verbose_args(args),
            namespace,
        )
        return _finalize_verbose_namespace(parsed), extras

    parser.parse_known_args = wrapped
    _VERBOSE_WRAPPED_PARSERS.add(parser)


def add_verbose_argument(parser, default=0, max_level=3, root_parser=None):
    """Add a standard verbose flag to an argparse parser.

    Registers both a repeatable "-v" counter and an explicit "--verbose=N"
    option on the same destination.
    """
    # Wrap the root parser (for subcommand setups) or this parser itself.
    _wrap_verbose_parser(root_parser or parser)
    parser.set_defaults(verbose=default, **{_VERBOSE_COUNT_DEST: 0})
    all_levels = ["0=INFO", "1=VERBOSE", "2=DEBUG", "3=SPAM"]
    shown_levels = ", ".join(all_levels[: max_level + 1])
    parser.add_argument(
        "-v",
        action=_VerboseCountAction,
        dest="verbose",
        help=(
            f"Increase verbosity; repeat as -vv for DEBUG "
            f"(default: --verbose={default})"
        ),
    )
    parser.add_argument(
        "--verbose",
        type=int,
        dest="verbose",
        metavar="N",
        help=(
            f"Set verbosity level explicitly ({shown_levels}) "
            f"(default: --verbose={default})"
        ),
    )


def add_version_argument(parser, package="sbomnix"):
    """Add a standard version flag to an argparse parser."""
    version_string = get_py_pkg_version(package)
    parser.add_argument("--version", action="version", version=version_string)


================================================
FILE: src/common/columns.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Shared DataFrame column names used across package boundaries."""

# Column name constants shared across modules so producers and consumers of
# the project's DataFrames agree on spelling.
COMMENT = "comment"
COUNT = "count"
CPE = "cpe"
CLASSIFY = "classify"
DEPENDENCY_UID = "dependency_uid"
LEVEL = "level"
MODIFIED = "modified"
NAME = "name"
NEWEST_UPSTREAM_RELEASE = "newest_upstream_release"
NIXPKGS_PR = "nixpkgs_pr"
OUTPUTS = "outputs"
PACKAGE = "package"
PACKAGE_REPOLOGY = "package_repology"
PATCHED = "patched"
PNAME = "pname"
POTENTIALLY_VULNERABLE = "potentially_vulnerable"
RAW_NAME = "raw_name"
REPO = "repo"
REPO_VERSION_CLASSIFY = "repo_version_classify"
SBOM_VERSION_CLASSIFY = "sbom_version_classify"
SCANNER = "scanner"
SEVERITY = "severity"
SIMILARITY = "similarity"
SORTCOL = "sortcol"
SRC_PATH = "src_path"
STATUS = "status"
STORE_PATH = "store_path"
SUM = "sum"
TARGET_PATH = "target_path"
URL = "url"
VERSION = "version"
# VERSION_* columns distinguish where a version string was observed
# (local system, nixpkgs, repology, sbom, upstream).
VERSION_CMP = "version_cmp"
VERSION_LOCAL = "version_local"
VERSION_NIXPKGS = "version_nixpkgs"
VERSION_REPOLOGY = "version_repology"
VERSION_SBOM = "version_sbom"
VERSION_UPSTREAM = "version_upstream"
VULN_ID = "vuln_id"
WHITELIST = "whitelist"
WHITELIST_COMMENT = "whitelist_comment"


================================================
FILE: src/common/df.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Shared dataframe helpers."""

import csv
import logging
import urllib.error
from typing import Literal, cast, overload

import pandas as pd
from tabulate import tabulate

from common.errors import CsvLoadError
from common.log import LOG


def df_to_csv_file(df, name, loglevel=logging.INFO):
    """Write dataframe to csv file."""
    csv_options = {
        "path_or_buf": name,
        "quoting": csv.QUOTE_ALL,
        "sep": ",",
        "index": False,
        "encoding": "utf-8",
    }
    df.to_csv(**csv_options)
    LOG.log(loglevel, "Wrote: %s", name)


@overload
def df_from_csv_file(name, exit_on_error: Literal[True] = True) -> pd.DataFrame: ...


@overload
def df_from_csv_file(
    name,
    exit_on_error: Literal[False],
) -> pd.DataFrame | None: ...


def df_from_csv_file(name, exit_on_error=True):
    """Read csv file into dataframe."""
    LOG.debug("Reading: %s", name)
    # Failure modes we surface to the caller: malformed/empty CSV and
    # URL targets that cannot be fetched.
    recoverable = (
        pd.errors.EmptyDataError,
        pd.errors.ParserError,
        urllib.error.HTTPError,
        urllib.error.URLError,
    )
    try:
        frame = pd.read_csv(name, keep_default_na=False, dtype=str)
    except recoverable as error:
        if exit_on_error:
            raise CsvLoadError(name, error) from error
        LOG.debug("Error reading csv file '%s':\n%s", name, error)
        return None
    return frame.reset_index(drop=True)


def df_regex_filter(df: pd.DataFrame, column: str, regex: str) -> pd.DataFrame:
    """Return rows where column `column` values match the given regex."""
    LOG.debug("column:'%s', regex:'%s'", column, regex)
    mask = df[column].str.contains(regex, regex=True, na=False)
    return cast(pd.DataFrame, df[mask])


def df_log(df, loglevel, tablefmt="presto"):
    """Log dataframe with given loglevel and tablefmt."""
    # Skip the (potentially expensive) rendering when it would not be shown.
    if not LOG.isEnabledFor(loglevel):
        return
    if df is None or df.empty:
        return
    rendered = tabulate(
        df.fillna(""),
        headers="keys",
        tablefmt=tablefmt,
        stralign="left",
        showindex=False,
    )
    LOG.log(loglevel, "\n%s\n", rendered)


================================================
FILE: src/common/errors.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Shared exception types for expected user-facing failures."""

import os
import shlex


class SbomnixError(RuntimeError):
    """Base class for expected user-facing errors.

    Subclasses represent anticipated failure modes (bad input, missing
    tools, failed external commands) rather than programming errors.
    """


class FlakeRefResolutionError(SbomnixError):
    """Raised when an input looks like a flakeref but cannot be resolved."""

    def __init__(self, flakeref, stderr="", action="evaluating"):
        # Keep the raw inputs around for callers that want details.
        self.flakeref = flakeref
        self.stderr = str(stderr) if stderr is not None else ""
        detail = self.stderr.strip()
        message = f"Failed {action} flakeref '{flakeref}'"
        if detail:
            message = f"{message}: {detail}"
        super().__init__(message)


class FlakeRefRealisationError(FlakeRefResolutionError):
    """Raised when a flakeref resolves but cannot be force-realised."""

    def __init__(self, flakeref, stderr=""):
        super().__init__(flakeref, stderr=stderr, action="force-realising")


class CsvLoadError(SbomnixError):
    """Raised when a CSV input cannot be read."""

    def __init__(self, name, error):
        # Preserve the file name and the underlying pandas/urllib error.
        self.name = name
        self.error = error
        message = f"Error reading csv file '{name}':\n{error}"
        super().__init__(message)


class CommandNotFoundError(SbomnixError):
    """Raised when a required executable is not available in PATH."""

    def __init__(self, name):
        self.name = name
        message = f"command '{name}' is not in PATH"
        super().__init__(message)


class NixCommandError(SbomnixError):
    """Raised when a required Nix command fails."""

    def __init__(self, command, stderr="", stdout=""):
        # The command may be a string, bytes, or an argument sequence.
        self.command = _format_command(command)
        self.stderr = str(stderr) if stderr is not None else ""
        self.stdout = str(stdout) if stdout is not None else ""
        # Prefer stderr output for the message detail; fall back to stdout.
        detail = self.stderr.strip() or self.stdout.strip()
        message = f"Failed running Nix command `{self.command}`"
        if detail:
            message = f"{message}: {detail}"
        super().__init__(message)


class InvalidNixArtifactError(SbomnixError):
    """Raised when a CLI target is not a valid nix artifact."""

    def __init__(self, path):
        self.path = path
        message = f"Specified target is not a nix artifact: '{path}'"
        super().__init__(message)


class InvalidNixJsonError(SbomnixError):
    """Raised when a Nix JSON interface returns an unsupported shape."""

    def __init__(self, command, detail):
        self.command = command
        self.detail = detail
        message = (
            f"Unexpected JSON from `{command}`: {detail}. "
            "The pinned Nix output schema may have changed; refusing to continue."
        )
        super().__init__(message)


class MissingNixDeriverError(SbomnixError):
    """Raised when a nix artifact cannot be mapped back to a derivation."""

    def __init__(self, path):
        self.path = path
        message = f"No deriver found for: '{path}'"
        super().__init__(message)


class MissingNixDerivationMetadataError(SbomnixError):
    """Raised when an artifact has no derivation metadata to model as a package."""

    def __init__(self, path):
        self.path = path
        message = f"No derivation metadata found for: '{path}'"
        super().__init__(message)


class MissingNixOutPathError(SbomnixError):
    """Raised when a derivation does not expose an out path."""

    def __init__(self, path):
        self.path = path
        message = f"No outpath found for: '{path}'"
        super().__init__(message)


class InvalidCpeDictionaryError(SbomnixError):
    """Raised when the downloaded CPE dictionary has invalid columns."""

    def __init__(self, required_cols):
        # Store a stable, sorted view of the missing/required columns.
        self.required_cols = tuple(sorted(required_cols))
        message = f"Missing required columns {list(self.required_cols)} from cpedict"
        super().__init__(message)


class WhitelistApplicationError(SbomnixError):
    """Raised when vulnerability whitelist application cannot proceed.

    Instantiated with a human-readable message. The previous explicit
    ``__init__`` only forwarded to ``super().__init__`` and was redundant,
    so it has been removed; the constructor signature is unchanged.
    """


class InvalidSbomError(SbomnixError):
    """Raised when a supplied SBOM path is invalid."""

    def __init__(self, path):
        self.path = path
        message = f"Specified sbom target is not a json file: '{path}'"
        super().__init__(message)


def _format_command(command):
    if isinstance(command, bytes):
        return command.decode(errors="replace")
    if isinstance(command, str):
        return command
    return shlex.join(os.fspath(part) for part in command)


================================================
FILE: src/common/flakeref.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Flakeref resolution helpers."""

import logging
import pathlib
import re

from common.errors import FlakeRefRealisationError, FlakeRefResolutionError
from common.log import LOG, LOG_VERBOSE
from common.nix_utils import parse_nix_derivation_show
from common.proc import ExecCmdFn, exec_cmd, nix_cmd

# Attr-path suffix selecting a NixOS system closure from a configuration.
NIXOS_CONFIGURATION_TOPLEVEL_SUFFIX = ".config.system.build.toplevel"
# Matches "<flake>#nixosConfigurations.<rest>"; <rest> still needs
# attr-segment parsing (it may be a quoted name with escapes).
_NIXOS_CONFIGURATION_PREFIX_RE = re.compile(
    r"^(?P<flake>.+)#nixosConfigurations\.(?P<rest>.+)$"
)
# Characters allowed in an unquoted Nix attr path segment.
_UNQUOTED_ATTR_SEGMENT_RE = re.compile(r"^[A-Za-z0-9_'-]+$")
# Backslash escapes recognized in quoted Nix strings (escape char -> value).
_NIX_STRING_ESCAPES = {
    '"': '"',
    "\\": "\\",
    "n": "\n",
    "r": "\r",
    "t": "\t",
}


def try_resolve_flakeref(  # noqa: PLR0913
    flakeref: str,
    force_realise: bool = False,
    impure: bool = False,
    derivation: bool = False,
    *,
    exec_cmd_fn: ExecCmdFn | None = None,
    log: logging.Logger | None = None,
) -> str | None:
    """
    Resolve flakeref to out-path, force-realising the output if
    ``force_realise`` is True.

    :param flakeref: flake reference or plain path/attribute to resolve
    :param force_realise: build the output (nix build) instead of only
        evaluating it
    :param impure: pass --impure to the underlying nix commands
    :param derivation: resolve to the .drv path via `nix derivation show`;
        only honored when ``force_realise`` is False
    :param exec_cmd_fn: injectable command runner (defaults to exec_cmd)
    :param log: injectable logger (defaults to module LOG)
    :return: resolved store path, or None when the input does not look like
        a flakeref and fails to evaluate
    :raises FlakeRefResolutionError: evaluating a flakeref-looking input fails
    :raises FlakeRefRealisationError: realising (building) the flakeref fails
    """
    # Dependency injection hooks for tests; fall back to module defaults.
    exec_cmd_fn = exec_cmd if exec_cmd_fn is None else exec_cmd_fn
    log = LOG if log is None else log

    looks_like_flakeref = _looks_like_flakeref(flakeref)
    # Path 1: caller wants the derivation (.drv) path; no build required.
    if derivation and not force_realise and looks_like_flakeref:
        log.info("Evaluating flakeref '%s'", flakeref)
        cmd = nix_cmd("derivation", "show", flakeref, impure=impure)
        ret = exec_cmd_fn(cmd, raise_on_error=False, return_error=True, log_error=False)
        if ret is None or ret.returncode != 0:
            raise FlakeRefResolutionError(flakeref, ret.stderr if ret else "")
        drv_paths = parse_nix_derivation_show(ret.stdout)
        # Use the first derivation key; "" signals an empty result set.
        drv_path = next(iter(drv_paths), "")
        if not drv_path:
            raise FlakeRefResolutionError(
                flakeref,
                "nix derivation show returned no derivation path",
            )
        log.debug("flakeref='%s' maps to derivation='%s'", flakeref, drv_path)
        return drv_path

    # Path 2: build the flakeref and return the first printed out-path.
    if force_realise and looks_like_flakeref:
        log.info("Realising flakeref '%s'", flakeref)
        cmd = nix_cmd(
            "build",
            "--no-link",
            "--print-out-paths",
            flakeref,
            impure=impure,
        )
        ret = exec_cmd_fn(cmd, raise_on_error=False, return_error=True, log_error=False)
        if ret is None or ret.returncode != 0:
            raise FlakeRefRealisationError(flakeref, ret.stderr if ret else "")
        nixpath = _first_output_path(ret.stdout)
        if not nixpath:
            raise FlakeRefRealisationError(
                flakeref,
                "nix build returned no output path",
            )
        log.debug("flakeref='%s' maps to path='%s'", flakeref, nixpath)
        return nixpath

    # Path 3: evaluate only. Inputs that do not look like flakerefs are
    # allowed to fail quietly (return None) so callers can treat them as
    # plain paths.
    if looks_like_flakeref:
        log.info("Evaluating flakeref '%s'", flakeref)
    else:
        log.log(LOG_VERBOSE, "Evaluating '%s'", flakeref)
    cmd = nix_cmd("eval", "--raw", flakeref, impure=impure)
    ret = exec_cmd_fn(cmd, raise_on_error=False, return_error=True, log_error=False)
    if ret is None or ret.returncode != 0:
        if looks_like_flakeref:
            raise FlakeRefResolutionError(flakeref, ret.stderr if ret else "")
        log.debug("not a flakeref: '%s'", flakeref)
        return None
    nixpath = ret.stdout.strip()
    log.debug("flakeref='%s' maps to path='%s'", flakeref, nixpath)
    if not force_realise:
        return nixpath
    # force_realise with an input that only evaluated (not a flakeref-looking
    # build target): build it, but keep the evaluated out-path as the result.
    log.info("Realising flakeref '%s'", flakeref)
    cmd = nix_cmd("build", "--no-link", flakeref, impure=impure)
    ret = exec_cmd_fn(cmd, raise_on_error=False, return_error=True, log_error=False)
    if ret is None or ret.returncode != 0:
        raise FlakeRefRealisationError(flakeref, ret.stderr if ret else "")
    return nixpath


def _first_output_path(stdout: str) -> str:
    """Return the first output path printed by ``nix build --print-out-paths``."""
    return next((line.strip() for line in stdout.splitlines() if line.strip()), "")


def parse_nixos_configuration_ref(
    flakeref: str,
    *,
    suffix: str = "",
) -> tuple[str, str] | None:
    """
    Parse ``<flake>#nixosConfigurations.<name><suffix>``.

    ``name`` may be either an unquoted attr segment or a quoted segment such as
    ``"host.example.com"``. The returned name is decoded and safe to re-quote.
    """
    match = _NIXOS_CONFIGURATION_PREFIX_RE.match(flakeref or "")
    if match is None:
        return None
    consumed = _consume_nix_attr_segment(match.group("rest"))
    if consumed is None:
        return None
    name, remainder = consumed
    # Everything after the configuration name must equal the expected suffix.
    if remainder != suffix:
        return None
    return match.group("flake"), name


def quote_nix_attr_segment(name: str) -> str:
    """Return a safely quoted Nix attr path segment."""
    # Single-character escapes; "${" is handled separately below because it
    # spans two characters and must not be treated as a bare "$".
    char_escapes = {
        '"': '\\"',
        "\\": "\\\\",
        "\n": "\\n",
        "\r": "\\r",
        "\t": "\\t",
    }
    pieces = []
    pos = 0
    while pos < len(name):
        if name.startswith("${", pos):
            # Escape interpolation openers so Nix treats them literally.
            pieces.append("\\${")
            pos += 2
            continue
        char = name[pos]
        pieces.append(char_escapes.get(char, char))
        pos += 1
    return '"' + "".join(pieces) + '"'


def _consume_nix_attr_segment(value: str) -> tuple[str, str] | None:
    """Split one attr segment off the front of value; None if malformed."""
    if not value:
        return None
    if value[0] == '"':
        # Quoted segment: locate the closing quote, then decode escapes.
        end = _find_quoted_attr_end(value)
        if end is None:
            return None
        decoded = _decode_nix_quoted_attr_segment(value[: end + 1])
        if decoded is None:
            return None
        return decoded, value[end + 1 :]

    # Unquoted segment: take everything up to the first dot.
    head, dot, rest = value.partition(".")
    if not head or _UNQUOTED_ATTR_SEGMENT_RE.match(head) is None:
        return None
    return head, f"{dot}{rest}" if dot else ""


def _decode_nix_quoted_attr_segment(value: str) -> str | None:
    """Decode a double-quoted Nix attr segment to its plain string value.

    Returns None for malformed input (missing quotes, dangling backslash)
    or when the segment contains an un-escaped ``${`` interpolation, which
    cannot be decoded statically.
    """
    end = len(value) - 1
    # Must start and end with a double quote.
    if len(value) < 2 or value[0] != '"' or value[end] != '"':
        return None

    decoded = []
    idx = 1
    while idx < end:
        char = value[idx]
        # Bare "${" opens a Nix interpolation: refuse to decode.
        if char == "$" and idx + 1 < end and value[idx + 1] == "{":
            return None
        if char != "\\":
            decoded.append(char)
            idx += 1
            continue

        # Backslash escape: consume the character after the backslash.
        idx += 1
        if idx >= end:
            # Trailing backslash with nothing to escape: malformed.
            return None
        escaped = value[idx]
        if escaped == "$" and idx + 1 < end and value[idx + 1] == "{":
            # "\${" decodes to a literal "${".
            decoded.append("${")
            idx += 2
            continue
        # Known escapes map to their character; unknown ones keep the backslash.
        decoded.append(_NIX_STRING_ESCAPES.get(escaped, f"\\{escaped}"))
        idx += 1
    return "".join(decoded)


def _find_quoted_attr_end(value: str) -> int | None:
    escaped = False
    for idx, char in enumerate(value[1:], start=1):
        if escaped:
            escaped = False
            continue
        if char == "\\":
            escaped = True
            continue
        if char == '"':
            return idx
    return None


def _looks_like_flakeref(flakeref: str) -> bool:
    """Return true if the input is likely intended as a flake reference."""
    looks_like = False
    if flakeref:
        path = pathlib.Path(flakeref)
        if path.exists():
            looks_like = path.is_dir() and (path / "flake.nix").exists()
        else:
            looks_like = (
                flakeref.startswith("nixpkgs=")
                or "#" in flakeref
                or "?" in flakeref
                or re.match(r"^[A-Za-z][A-Za-z0-9+.-]*:", flakeref) is not None
            )
    return looks_like


================================================
FILE: src/common/http.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Shared HTTP session primitives."""

from collections.abc import Collection
from typing import Any

from requests import Session
from requests.adapters import HTTPAdapter
from requests_cache import CacheMixin
from requests_ratelimiter import LimiterMixin
from urllib3.util.retry import Retry

# HTTP statuses worth retrying: rate limiting (429) and transient 5xx errors.
DEFAULT_RETRY_STATUS_CODES = (429, 500, 502, 503, 504)


# NOTE: the MRO matters here — CacheMixin must come before LimiterMixin so
# cache hits do not consume rate-limit quota (per the linked compatibility
# guide); do not reorder the bases.
class CachedLimiterSession(CacheMixin, LimiterMixin, Session):  # pyright: ignore[reportIncompatibleMethodOverride]
    """
    Session class with caching and rate-limiting.
    https://requests-cache.readthedocs.io/en/stable/user_guide/compatibility.html
    """


def mount_retries(
    session: Session,
    *,
    allowed_methods: Collection[str] = frozenset(("GET", "HEAD")),
) -> Session:
    """Attach a retrying adapter to a requests session.

    Retries up to 3 times on connect/read/status failures with exponential
    backoff, honoring Retry-After headers; returns the same session.
    """
    policy = Retry(
        total=3,
        connect=3,
        read=3,
        status=3,
        backoff_factor=1,
        status_forcelist=DEFAULT_RETRY_STATUS_CODES,
        allowed_methods=allowed_methods,
        raise_on_status=False,
        respect_retry_after_header=True,
    )
    adapter = HTTPAdapter(max_retries=policy)
    for prefix in ("http://", "https://"):
        session.mount(prefix, adapter)
    return session


def create_cached_limited_session(
    *,
    per_second: int | None = None,
    per_minute: int | None = None,
    expire_after: int | None = None,
    user_agent: str | None = None,
    allowed_methods: Collection[str] = frozenset(("GET", "HEAD")),
) -> Session:
    """Create a cached, rate-limited session with retry policy attached."""
    # Forward only the options the caller actually set, so the session
    # class keeps its own defaults for the rest.
    options: dict[str, Any] = {
        key: value
        for key, value in (
            ("per_second", per_second),
            ("per_minute", per_minute),
            ("expire_after", expire_after),
        )
        if value is not None
    }
    session = CachedLimiterSession(**options)
    mount_retries(session, allowed_methods=allowed_methods)
    if user_agent:
        session.headers.update({"User-Agent": user_agent})
    return session


================================================
FILE: src/common/log.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Shared logging configuration and logger access."""

import logging
import os
from typing import Any, cast

from colorlog import ColoredFormatter, default_log_colors

# Custom level between INFO (20) and DEBUG (10).
LOG_VERBOSE = 15
# Most chatty level, just below DEBUG.
LOG_SPAM = logging.DEBUG - 1
LOG_TRACE = LOG_SPAM  # alias: TRACE logs at the SPAM level
# Indexed by the CLI verbosity count (0..3).
LOG_LEVELS = [logging.INFO, LOG_VERBOSE, logging.DEBUG, LOG_SPAM]


class SbomnixLogger(logging.Logger):
    """Project logger with sbomnix-specific verbose levels."""

    def verbose(self, msg: object, *args: object, **kwargs: Any) -> None:
        """Log at the project VERBOSE level."""
        if self.isEnabledFor(LOG_VERBOSE):
            # stacklevel=2 makes %(filename)s/%(funcName)s/%(lineno)d refer
            # to the caller, not this wrapper method.
            kwargs.setdefault("stacklevel", 2)
            self._log(LOG_VERBOSE, msg, args, **kwargs)

    def spam(self, msg: object, *args: object, **kwargs: Any) -> None:
        """Log at the project SPAM level."""
        if self.isEnabledFor(LOG_SPAM):
            kwargs.setdefault("stacklevel", 2)
            self._log(LOG_SPAM, msg, args, **kwargs)

    def trace(self, msg: object, *args: object, **kwargs: Any) -> None:
        """Log at the project TRACE level alias."""
        # LOG_TRACE aliases LOG_SPAM, so this is equivalent to spam().
        if self.isEnabledFor(LOG_TRACE):
            kwargs.setdefault("stacklevel", 2)
            self._log(LOG_TRACE, msg, args, **kwargs)


# Public API of this module.
__all__ = [
    "LOG",
    "LOG_SPAM",
    "LOG_TRACE",
    "LOG_VERBOSE",
    "is_debug_enabled",
    "set_log_verbosity",
]


# Register the custom level names and make subsequently-created loggers
# instances of SbomnixLogger.
logging.addLevelName(LOG_VERBOSE, "VERBOSE")
logging.addLevelName(LOG_SPAM, "SPAM")
logging.setLoggerClass(SbomnixLogger)

# Project-wide logger; keyed by this file's absolute path so the name cannot
# collide with other loggers in the process.
LOG = cast(SbomnixLogger, logging.getLogger(os.path.abspath(__file__)))


def set_log_verbosity(verbosity=0):
    """Set logging verbosity."""
    # Clamp into the valid index range of LOG_LEVELS.
    clamped = max(0, min(verbosity, len(LOG_LEVELS) - 1))
    _init_logging(clamped)


def _init_logging(verbosity=0):
    """Initialize logging."""
    level = LOG_LEVELS[verbosity]
    # At DEBUG and below, include source location in every record.
    if level <= logging.DEBUG:
        logformat = (
            "%(log_color)s%(levelname)-8s%(reset)s "
            "%(filename)s:%(funcName)s():%(lineno)d "
            "%(message)s"
        )
    else:
        logformat = "%(log_color)s%(levelname)-8s%(reset)s %(message)s"
    colors = dict(default_log_colors)
    colors.update(
        INFO="fg_bold_white",
        VERBOSE="fg_bold_cyan",
        DEBUG="fg_bold_white",
        SPAM="fg_bold_white",
    )
    # Reuse the first existing handler on re-initialization to avoid
    # duplicate output.
    handler = LOG.handlers[0] if LOG.handlers else logging.StreamHandler()
    handler.setFormatter(
        ColoredFormatter(
            logformat,
            log_colors=colors,
            stream=getattr(handler, "stream", None),
        )
    )
    if not LOG.handlers:
        LOG.addHandler(handler)
    LOG.setLevel(level)


def is_debug_enabled():
    """Return True when project logging is enabled for DEBUG details."""
    # isEnabledFor() respects manager-level disabling, unlike comparing
    # LOG.level directly.
    return LOG.isEnabledFor(logging.DEBUG)


# Initialize logging with default (INFO) verbosity at import time.
set_log_verbosity(0)


================================================
FILE: src/common/nix_utils.py
================================================
# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)
#
# SPDX-License-Identifier: Apache-2.0

"""Helpers for normalizing nix store paths and derivation JSON."""

import json
import os
import re

from common.errors import InvalidNixJsonError

# Basename of a store path: 32-character nix hash, dash, then the name part.
RE_NIX_STORE_PATH_BASENAME = re.compile(r"^[0-9a-z]{32}-.+")
# Absolute store path anywhere in a string; captures the full
# "/<store-dir>/<hash>-<name>" span.
RE_NIX_STORE_PATH = re.compile(r"(?P<store_path>/(?:[^/\s:]+/)+[0-9a-z]{32}-[^/\s:]+)")
# Command labels embedded in InvalidNixJsonError messages.
NIX_DERIVATION_SHOW_JSON = "nix derivation show"
NIX_PATH_INFO_JSON = "nix path-info --json --json-format 1"


def get_nix_store_dir(path=None, default: str | None = "/nix/store") -> str | None:
    """Infer the nix store directory from an absolute store path-like string."""
    if not path:
        return default
    match = RE_NIX_STORE_PATH.search(str(path))
    if match is None:
        return default
    # The store dir is the parent of the "<hash>-<name>" component.
    return os.path.dirname(match.group("store_path"))


def normalize_nix_store_path(path, store_dir="/nix/store"):
    """Return an absolute store path for basename-only store path strings."""
    # Pass non-strings and empty values through unchanged.
    if not (isinstance(path, str) and path):
        return path
    needs_prefix = not os.path.isabs(path) and RE_NIX_STORE_PATH_BASENAME.match(path)
    return os.path.join(store_dir, path) if needs_prefix else path


def _iter_nix_store_dir_candidates(value):
    """Yield strings that may reveal the nix store directory."""
    if isinstance(value, str):
        yield value
    elif isinstance(value, dict):
        for item in value.values():
            yield from _iter_nix_store_dir_candidates(item)
    elif isinstance(value, (list, tuple)):
        for item in value:
            yield from _iter_nix_store_dir_candidates(item)


def _infer_nix_store_dir(drv_info, default="/nix/store"):
    """Infer the nix store directory from derivation fields when keys are relative."""
    if not isinstance(drv_info, dict):
        return default
    # Only these fields are searched for absolute store paths.
    fields = {
        "builder": drv_info.get("builder"),
        "outputs": drv_info.get("outputs"),
        "env": drv_info.get("env"),
    }
    for candidate in _iter_nix_store_dir_candidates(fields):
        inferred = get_nix_store_dir(candidate, default=None)
        if inferred:
            return inferred
    return default


def _normalize_nix_derivation_info(drv_info, store_dir):
    """Normalize basename-only store paths within derivation info.

    Returns a shallow-copied dict; nested `outputs`, `env`, and `inputs`
    structures are rebuilt with absolute store paths where applicable.
    """
    if not isinstance(drv_info, dict):
        return drv_info

    result = dict(drv_info)

    # outputs: normalize each output's "path" field.
    outputs = result.get("outputs")
    if isinstance(outputs, dict):
        fixed_outputs = {}
        for out_name, out_info in outputs.items():
            if isinstance(out_info, dict):
                out_info = dict(out_info)
                if out_info.get("path"):
                    out_info["path"] = normalize_nix_store_path(
                        out_info["path"], store_dir
                    )
            fixed_outputs[out_name] = out_info
        result["outputs"] = fixed_outputs

    # env: normalize every value (non-store-path strings pass through).
    env = result.get("env")
    if isinstance(env, dict):
        result["env"] = {
            key: normalize_nix_store_path(value, store_dir)
            for key, value in env.items()
        }

    # inputs: normalize source paths and input-derivation keys.
    inputs = result.get("inputs")
    if isinstance(inputs, dict):
        inputs = dict(inputs)
        if isinstance(inputs.get("srcs"), list):
            inputs["srcs"] = [
                normalize_nix_store_path(src, store_dir) for src in inputs["srcs"]
            ]
        if isinstance(inputs.get("drvs"), dict):
            inputs["drvs"] = {
                normalize_nix_store_path(drv, store_dir): outs
                for drv, outs in inputs["drvs"].items()
            }
        result["inputs"] = inputs

    return result


def load_nix_json(stdout, command):
    """Load JSON produced by a Nix command and raise a user-facing error on drift."""
    try:
        parsed = json.loads(stdout)
    except json.JSONDecodeError as error:
        raise InvalidNixJsonError(command, f"invalid JSON: {error.msg}") from error
    return parsed


def parse_nix_derivation_show(stdout, store_path_hint=None):
    """Normalize `nix derivation show` JSON across direct and wrapped formats.

    Accepts both the legacy top-level mapping and the newer payload that
    wraps it under a `derivations` key.
    """
    payload = load_nix_json(stdout, NIX_DERIVATION_SHOW_JSON)
    if not isinstance(payload, dict):
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"expected top-level object, got {type(payload).__name__}",
        )
    derivations = payload.get("derivations", payload)
    if not isinstance(derivations, dict):
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"expected `derivations` object, got {type(derivations).__name__}",
        )

    fallback_store_dir = get_nix_store_dir(store_path_hint) or "/nix/store"
    result = {}
    for drv_path, drv_info in derivations.items():
        _validate_derivation_entry(drv_path, drv_info)
        # Prefer a store dir taken from the key itself; otherwise infer it
        # from the derivation fields, falling back to the hint/default.
        store_dir = get_nix_store_dir(drv_path, default=None) or _infer_nix_store_dir(
            drv_info, default=fallback_store_dir
        )
        key = normalize_nix_store_path(drv_path, store_dir)
        result[key] = _normalize_nix_derivation_info(drv_info, store_dir)
    return result


def _validate_derivation_entry(drv_path, drv_info):
    """Validate the `nix derivation show` fields consumed by this project."""
    if not (isinstance(drv_path, str) and drv_path):
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            "expected derivation keys to be non-empty strings",
        )
    if not isinstance(drv_info, dict):
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"expected derivation `{drv_path}` to be an object",
        )
    owner = f"derivation `{drv_path}`"
    _validate_optional_mapping(drv_info, "env", owner)
    _validate_derivation_outputs(drv_path, drv_info)
    _check_optional_derivation_inputs(drv_path, drv_info)


def _validate_optional_mapping(record, field, owner):
    """Require record[field], when present, to be an object."""
    value = record.get(field)
    if value is None or isinstance(value, dict):
        return
    raise InvalidNixJsonError(
        NIX_DERIVATION_SHOW_JSON,
        f"expected `{field}` in {owner} to be an object",
    )


def _validate_derivation_outputs(drv_path, drv_info):
    outputs = drv_info.get("outputs")
    if outputs is None:
        return
    if not isinstance(outputs, dict):
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"expected `outputs` in derivation `{drv_path}` to be an object",
        )
    for output_name, output in outputs.items():
        if not isinstance(output_name, str) or not output_name:
            raise InvalidNixJsonError(
                NIX_DERIVATION_SHOW_JSON,
                f"expected output names in derivation `{drv_path}` to be strings",
            )
        if not isinstance(output, dict):
            raise InvalidNixJsonError(
                NIX_DERIVATION_SHOW_JSON,
                f"expected output `{output_name}` in derivation `{drv_path}` "
                "to be an object",
            )
        output_path = output.get("path")
        if output_path is not None and not isinstance(output_path, str):
            raise InvalidNixJsonError(
                NIX_DERIVATION_SHOW_JSON,
                f"expected output `{output_name}` path in derivation `{drv_path}` "
                "to be a string",
            )


def _check_optional_derivation_inputs(drv_path, drv_info):
    """Validate input shape without requiring callers to consume dependencies."""
    inputs = drv_info.get("inputs")
    if inputs is not None:
        if not isinstance(inputs, dict):
            raise InvalidNixJsonError(
                NIX_DERIVATION_SHOW_JSON,
                f"expected `inputs` in derivation `{drv_path}` to be an object",
            )
        _validate_optional_mapping(inputs, "drvs", f"`inputs` for `{drv_path}`")
        _check_optional_input_srcs(drv_path, inputs)
    # Legacy fields are rejected even when the modern `inputs` object is absent.
    _reject_legacy_derivation_inputs(drv_path, drv_info)


def _check_optional_input_srcs(drv_path, inputs):
    """Validate the optional `inputs.srcs` list of one derivation."""
    srcs = inputs.get("srcs")
    if srcs is None:
        return
    if not isinstance(srcs, list):
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"expected `inputs.srcs` in derivation `{drv_path}` to be a list",
        )
    _validated_path_values(
        srcs,
        f"`inputs.srcs` in derivation `{drv_path}`",
        NIX_DERIVATION_SHOW_JSON,
    )


def _reject_legacy_derivation_inputs(drv_path, drv_info):
    for field in ("inputDrvs", "inputSrcs"):
        if field in drv_info:
            raise InvalidNixJsonError(
                NIX_DERIVATION_SHOW_JSON,
                f"unsupported legacy `{field}` in derivation `{drv_path}`",
            )


def nix_derivation_input_drv_paths(drv_path, drv_info):
    """Return validated input derivation paths from normalized derivation JSON."""
    inputs = _require_derivation_inputs(drv_path, drv_info)
    owner = f"`inputs.drvs` in derivation `{drv_path}`"
    if "drvs" not in inputs:
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"missing `inputs.drvs` in derivation `{drv_path}`",
        )
    drvs = inputs["drvs"]
    if isinstance(drvs, dict):
        return _validated_path_keys(drvs, owner, NIX_DERIVATION_SHOW_JSON)
    raise InvalidNixJsonError(
        NIX_DERIVATION_SHOW_JSON,
        f"expected `inputs.drvs` in derivation `{drv_path}` to be an object",
    )


def nix_derivation_input_src_paths(drv_path, drv_info):
    """Return validated direct source inputs from normalized derivation JSON."""
    inputs = _require_derivation_inputs(drv_path, drv_info)
    owner = f"`inputs.srcs` in derivation `{drv_path}`"
    if "srcs" not in inputs:
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"missing `inputs.srcs` in derivation `{drv_path}`",
        )
    srcs = inputs["srcs"]
    if isinstance(srcs, list):
        return _validated_path_values(srcs, owner, NIX_DERIVATION_SHOW_JSON)
    raise InvalidNixJsonError(
        NIX_DERIVATION_SHOW_JSON,
        f"expected `inputs.srcs` in derivation `{drv_path}` to be a list",
    )


def _require_derivation_inputs(drv_path, drv_info):
    """Return the mandatory, validated ``inputs`` object of a derivation.

    While parsing treats ``inputs`` as optional (some callers never read
    dependencies), graph construction requires the modern dependency schema.
    This accessor therefore demands a well-formed ``inputs`` object, and the
    field-specific accessors additionally demand ``inputs.drvs`` and
    ``inputs.srcs``; genuine leaf derivations still provide those fields as
    empty containers.
    """
    if not isinstance(drv_info, dict):
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"expected derivation `{drv_path}` to be an object",
        )
    _reject_legacy_derivation_inputs(drv_path, drv_info)
    if "inputs" not in drv_info:
        raise InvalidNixJsonError(
            NIX_DERIVATION_SHOW_JSON,
            f"missing derivation inputs in `{drv_path}`",
        )
    inputs = drv_info["inputs"]
    if isinstance(inputs, dict):
        return inputs
    raise InvalidNixJsonError(
        NIX_DERIVATION_SHOW_JSON,
        f"expected `inputs` in derivation `{drv_path}` to be an object",
    )


def normalize_nix_path_info(path_info, *, command=NIX_PATH_INFO_JSON):
    """Normalize and validate Nix path-info JSON to a path-indexed dictionary."""
    if isinstance(path_info, dict):
        return _normalize_path_info_mapping(path_info, command)
    if isinstance(path_info, list):
        return _normalize_path_info_list(path_info, command)
    raise InvalidNixJsonError(
        command,
        f"expected top-level object or list, got {type(path_info).__name__}",
    )


def _normalize_path_info_mapping(path_info, command):
    """Validate a path-keyed path-info object and return a shallow copy."""
    for path, info in path_info.items():
        if not (isinstance(path, str) and path):
            raise InvalidNixJsonError(
                command,
                "expected path-info object keys to be non-empty strings",
            )
        if not isinstance(info, dict):
            raise InvalidNixJsonError(
                command,
                f"expected path-info record for `{path}` to be an object",
            )
    return dict(path_info)


def _normalize_path_info_list(path_info, command):
    """Index a path-info list by each record's `path`/`storePath` field."""
    normalized = {}
    for index, info in enumerate(path_info):
        if not isinstance(info, dict):
            raise InvalidNixJsonError(
                command,
                f"expected path-info list item {index} to be an object",
            )
        # Newer Nix emits `path`; some variants emit `storePath` instead.
        path = info.get("path") or info.get("storePath")
        if not (isinstance(path, str) and path):
            raise InvalidNixJsonError(
                command,
                f"missing path string in path-info list item {index}",
            )
        normalized[path] = info
    return normalized


def nix_path_info_references(info, path, *, command=NIX_PATH_INFO_JSON):
    """Return validated path-info references for a store path."""
    if "references" not in info:
        raise InvalidNixJsonError(
            command,
            f"missing `references` in path-info for `{path}`",
        )
    references = info["references"]
    if not isinstance(references, list):
        raise InvalidNixJsonError(
            command,
            f"expected `references` in path-info for `{path}` to be a list",
        )
    bad_index = next(
        (
            index
            for index, reference in enumerate(references)
            if not (isinstance(reference, str) and reference)
        ),
        None,
    )
    if bad_index is not None:
        raise InvalidNixJsonError(
            command,
            f"expected `references[{bad_index}]` in path-info for `{path}` "
            "to be a non-empty string",
        )
    return references


def nix_path_info_deriver(info, path, *, command=NIX_PATH_INFO_JSON):
    """Return a validated path-info deriver value, or None when absent."""
    deriver = info.get("deriver")
    if isinstance(deriver, str):
        # Empty string is treated the same as a missing/null deriver.
        return deriver or None
    if deriver is None:
        return None
    raise InvalidNixJsonError(
        command,
        f"expected `deriver` in path-info for `{path}` to be a string or null",
    )


def nix_path_info_nar_hash(info, path, *, command=NIX_PATH_INFO_JSON):
    """Return a validated path-info NAR hash."""
    nar_hash = info.get("narHash")
    if not isinstance(nar_hash, str) or not nar_hash:
        raise InvalidNixJsonError(
            command,
            f"missing `narHash` string in path-info for `{path}`"
Download .txt
gitextract_rba_m7yr/

├── .envrc
├── .github/
│   ├── dependabot.yml
│   └── workflows/
│       ├── codeql.yml
│       ├── dependency-review.yml
│       ├── release_sbomnix.yml
│       ├── scorecards.yml
│       └── test_sbomnix.yml
├── .gitignore
├── .gitlint
├── LICENSES/
│   ├── Apache-2.0.txt
│   ├── BSD-3-Clause.txt
│   ├── CC-BY-3.0.txt
│   ├── CC-BY-SA-4.0.txt
│   └── MIT.txt
├── README.md
├── REUSE.toml
├── VERSION
├── default.nix
├── doc/
│   ├── nix_outdated.md
│   ├── nixgraph.md
│   ├── nixmeta.md
│   ├── provenance.md
│   ├── repology_cli.md
│   └── vulnxscan.md
├── flake.nix
├── nix/
│   ├── apps.nix
│   ├── default.nix
│   ├── formatter.nix
│   ├── git-hooks.nix
│   └── packages.nix
├── pyproject.toml
├── pyrightconfig.json
├── pytest.ini
├── scripts/
│   ├── check-fast.sh
│   ├── check-full.sh
│   ├── release-asset.sh
│   └── run-pytest-lane.sh
├── shell.nix
├── src/
│   ├── common/
│   │   ├── __init__.py
│   │   ├── cli_args.py
│   │   ├── columns.py
│   │   ├── df.py
│   │   ├── errors.py
│   │   ├── flakeref.py
│   │   ├── http.py
│   │   ├── log.py
│   │   ├── nix_utils.py
│   │   ├── package_names.py
│   │   ├── pkgmeta.py
│   │   ├── proc.py
│   │   ├── regex.py
│   │   ├── spdx.py
│   │   └── versioning.py
│   ├── nixgraph/
│   │   ├── __init__.py
│   │   ├── graph.py
│   │   ├── main.py
│   │   └── render.py
│   ├── nixmeta/
│   │   ├── __init__.py
│   │   ├── flake_metadata.py
│   │   ├── main.py
│   │   ├── metadata_json.py
│   │   └── scanner.py
│   ├── nixupdate/
│   │   ├── __init__.py
│   │   ├── nix_outdated.py
│   │   ├── nix_visualize.py
│   │   ├── pipeline.py
│   │   └── report.py
│   ├── provenance/
│   │   ├── __init__.py
│   │   ├── dependencies.py
│   │   ├── digests.py
│   │   ├── main.py
│   │   ├── nix_commands.py
│   │   ├── path_info.py
│   │   ├── schema.py
│   │   └── subjects.py
│   ├── repology/
│   │   ├── __init__.py
│   │   ├── adapter.py
│   │   ├── cves.py
│   │   ├── exceptions.py
│   │   ├── projects_parser.py
│   │   ├── repology_cli.py
│   │   ├── repology_cve.py
│   │   ├── reporting.py
│   │   ├── sbom.py
│   │   └── session.py
│   ├── sbomnix/
│   │   ├── __init__.py
│   │   ├── builder.py
│   │   ├── cdx.py
│   │   ├── cli_utils.py
│   │   ├── closure.py
│   │   ├── components.py
│   │   ├── cpe.py
│   │   ├── dependency_index.py
│   │   ├── derivation.py
│   │   ├── derivers.py
│   │   ├── dfcache.py
│   │   ├── exporters.py
│   │   ├── main.py
│   │   ├── meta.py
│   │   ├── meta_source.py
│   │   ├── runtime.py
│   │   └── vuln_enrichment.py
│   └── vulnxscan/
│       ├── __init__.py
│       ├── github_prs.py
│       ├── osv.py
│       ├── osv_client.py
│       ├── parsers.py
│       ├── repology_lookup.py
│       ├── reporting.py
│       ├── scanners.py
│       ├── triage.py
│       ├── utils.py
│       ├── vulnscan.py
│       ├── vulnxscan_cli.py
│       └── whitelist.py
└── tests/
    ├── __init__.py
    ├── compare_deps.py
    ├── compare_sboms.py
    ├── conftest.py
    ├── integration/
    │   ├── __init__.py
    │   ├── test_nixgraph_cli.py
    │   ├── test_nixmeta_cli.py
    │   ├── test_nixupdate_cli.py
    │   ├── test_provenance_cli.py
    │   ├── test_repology_cli.py
    │   ├── test_sbomnix_cli.py
    │   └── test_vulnxscan_cli.py
    ├── resources/
    │   ├── README.md
    │   ├── cdx_bom-1.3.schema.json
    │   ├── cdx_bom-1.4.schema.json
    │   ├── grype-test-db.tar.gz.license
    │   ├── jsf-0.82.schema.json
    │   ├── make_grype_test_db.py
    │   ├── nixmeta-package-set.nix
    │   ├── provenance-1.0.schema.json
    │   ├── repology/
    │   │   ├── cves_openssl.html
    │   │   ├── projects_empty.html
    │   │   └── projects_hello.html
    │   ├── sample_cdx_sbom.json
    │   ├── spdx.schema.json
    │   ├── spdx_bom-2.3.schema.json
    │   └── test-derivation-chain.nix
    ├── test_builder_runtime.py
    ├── test_buildtime_closure.py
    ├── test_cli_conventions.py
    ├── test_cli_error_boundaries.py
    ├── test_cli_smoke.py
    ├── test_common_log.py
    ├── test_common_versioning.py
    ├── test_compare_deps.py
    ├── test_components.py
    ├── test_cpe.py
    ├── test_dependency_index.py
    ├── test_derivation_hardening.py
    ├── test_flakeref_resolution.py
    ├── test_library_exceptions.py
    ├── test_nix_cli_argv.py
    ├── test_nix_outdated_pipeline.py
    ├── test_nix_target_resolution.py
    ├── test_nix_utils_parsing.py
    ├── test_nixgraph_graph.py
    ├── test_nixmeta_parsing.py
    ├── test_nixmeta_progress.py
    ├── test_nixmeta_source.py
    ├── test_nixmeta_source_export.py
    ├── test_osv_client.py
    ├── test_provenance_batching.py
    ├── test_provenance_path_info.py
    ├── test_provenance_subjects.py
    ├── test_repology_adapter.py
    ├── test_repology_cve.py
    ├── test_repology_projects_parser.py
    ├── test_repology_sbom.py
    ├── test_runtime_closure.py
    ├── test_sbom_closure.py
    ├── test_sbom_vuln_enrichment.py
    ├── test_schema_validation.py
    ├── test_store_batching.py
    ├── test_temp_sbom_generation.py
    ├── test_vulnix_test_support.py
    ├── test_vulnxscan_engine.py
    ├── test_vulnxscan_triage.py
    ├── test_whitelist.py
    ├── testpaths.py
    ├── testutils.py
    └── vulnix_test_support.py
Download .txt
SYMBOL INDEX (841 symbols across 122 files)

FILE: src/common/cli_args.py
  class _VerboseCountAction (line 17) | class _VerboseCountAction(argparse.Action):
    method __init__ (line 20) | def __init__(self, option_strings, dest, nargs=0, **kwargs):
    method __call__ (line 25) | def __call__(self, _parser, namespace, _values, _option_string=None):
  function check_positive (line 31) | def check_positive(val):
  function _is_integer (line 39) | def _is_integer(value):
  function _normalize_verbose_args (line 48) | def _normalize_verbose_args(args):
  function _finalize_verbose_namespace (line 72) | def _finalize_verbose_namespace(namespace):
  function _wrap_verbose_parser (line 79) | def _wrap_verbose_parser(parser):
  function add_verbose_argument (line 97) | def add_verbose_argument(parser, default=0, max_level=3, root_parser=None):
  function add_version_argument (line 124) | def add_version_argument(parser, package="sbomnix"):

FILE: src/common/df.py
  function df_to_csv_file (line 19) | def df_to_csv_file(df, name, loglevel=logging.INFO):
  function df_from_csv_file (line 28) | def df_from_csv_file(name, exit_on_error: Literal[True] = True) -> pd.Da...
  function df_from_csv_file (line 32) | def df_from_csv_file(
  function df_from_csv_file (line 38) | def df_from_csv_file(name, exit_on_error=True):
  function df_regex_filter (line 57) | def df_regex_filter(df: pd.DataFrame, column: str, regex: str) -> pd.Dat...
  function df_log (line 63) | def df_log(df, loglevel, tablefmt="presto"):

FILE: src/common/errors.py
  class SbomnixError (line 11) | class SbomnixError(RuntimeError):
  class FlakeRefResolutionError (line 15) | class FlakeRefResolutionError(SbomnixError):
    method __init__ (line 18) | def __init__(self, flakeref, stderr="", action="evaluating"):
  class FlakeRefRealisationError (line 28) | class FlakeRefRealisationError(FlakeRefResolutionError):
    method __init__ (line 31) | def __init__(self, flakeref, stderr=""):
  class CsvLoadError (line 35) | class CsvLoadError(SbomnixError):
    method __init__ (line 38) | def __init__(self, name, error):
  class CommandNotFoundError (line 44) | class CommandNotFoundError(SbomnixError):
    method __init__ (line 47) | def __init__(self, name):
  class NixCommandError (line 52) | class NixCommandError(SbomnixError):
    method __init__ (line 55) | def __init__(self, command, stderr="", stdout=""):
  class InvalidNixArtifactError (line 66) | class InvalidNixArtifactError(SbomnixError):
    method __init__ (line 69) | def __init__(self, path):
  class InvalidNixJsonError (line 74) | class InvalidNixJsonError(SbomnixError):
    method __init__ (line 77) | def __init__(self, command, detail):
  class MissingNixDeriverError (line 86) | class MissingNixDeriverError(SbomnixError):
    method __init__ (line 89) | def __init__(self, path):
  class MissingNixDerivationMetadataError (line 94) | class MissingNixDerivationMetadataError(SbomnixError):
    method __init__ (line 97) | def __init__(self, path):
  class MissingNixOutPathError (line 102) | class MissingNixOutPathError(SbomnixError):
    method __init__ (line 105) | def __init__(self, path):
  class InvalidCpeDictionaryError (line 110) | class InvalidCpeDictionaryError(SbomnixError):
    method __init__ (line 113) | def __init__(self, required_cols):
  class WhitelistApplicationError (line 120) | class WhitelistApplicationError(SbomnixError):
    method __init__ (line 123) | def __init__(self, message):
  class InvalidSbomError (line 127) | class InvalidSbomError(SbomnixError):
    method __init__ (line 130) | def __init__(self, path):
  function _format_command (line 135) | def _format_command(command):

FILE: src/common/flakeref.py
  function try_resolve_flakeref (line 30) | def try_resolve_flakeref(  # noqa: PLR0913
  function _first_output_path (line 107) | def _first_output_path(stdout: str) -> str:
  function parse_nixos_configuration_ref (line 112) | def parse_nixos_configuration_ref(
  function quote_nix_attr_segment (line 135) | def quote_nix_attr_segment(name: str) -> str:
  function _consume_nix_attr_segment (line 161) | def _consume_nix_attr_segment(value: str) -> tuple[str, str] | None:
  function _decode_nix_quoted_attr_segment (line 180) | def _decode_nix_quoted_attr_segment(value: str) -> str | None:
  function _find_quoted_attr_end (line 209) | def _find_quoted_attr_end(value: str) -> int | None:
  function _looks_like_flakeref (line 223) | def _looks_like_flakeref(flakeref: str) -> bool:

FILE: src/common/http.py
  class CachedLimiterSession (line 19) | class CachedLimiterSession(CacheMixin, LimiterMixin, Session):  # pyrigh...
  function mount_retries (line 26) | def mount_retries(
  function create_cached_limited_session (line 49) | def create_cached_limited_session(

FILE: src/common/log.py
  class SbomnixLogger (line 19) | class SbomnixLogger(logging.Logger):
    method verbose (line 22) | def verbose(self, msg: object, *args: object, **kwargs: Any) -> None:
    method spam (line 28) | def spam(self, msg: object, *args: object, **kwargs: Any) -> None:
    method trace (line 34) | def trace(self, msg: object, *args: object, **kwargs: Any) -> None:
  function set_log_verbosity (line 58) | def set_log_verbosity(verbosity=0):
  function _init_logging (line 64) | def _init_logging(verbosity=0):
  function is_debug_enabled (line 97) | def is_debug_enabled():

FILE: src/common/nix_utils.py
  function get_nix_store_dir (line 19) | def get_nix_store_dir(path=None, default: str | None = "/nix/store") -> ...
  function normalize_nix_store_path (line 28) | def normalize_nix_store_path(path, store_dir="/nix/store"):
  function _iter_nix_store_dir_candidates (line 37) | def _iter_nix_store_dir_candidates(value):
  function _infer_nix_store_dir (line 49) | def _infer_nix_store_dir(drv_info, default="/nix/store"):
  function _normalize_nix_derivation_info (line 66) | def _normalize_nix_derivation_info(drv_info, store_dir):
  function load_nix_json (line 112) | def load_nix_json(stdout, command):
  function parse_nix_derivation_show (line 120) | def parse_nix_derivation_show(stdout, store_path_hint=None):
  function _validate_derivation_entry (line 149) | def _validate_derivation_entry(drv_path, drv_info):
  function _validate_optional_mapping (line 166) | def _validate_optional_mapping(record, field, owner):
  function _validate_derivation_outputs (line 175) | def _validate_derivation_outputs(drv_path, drv_info):
  function _check_optional_derivation_inputs (line 205) | def _check_optional_derivation_inputs(drv_path, drv_info):
  function _reject_legacy_derivation_inputs (line 230) | def _reject_legacy_derivation_inputs(drv_path, drv_info):
  function nix_derivation_input_drv_paths (line 239) | def nix_derivation_input_drv_paths(drv_path, drv_info):
  function nix_derivation_input_src_paths (line 260) | def nix_derivation_input_src_paths(drv_path, drv_info):
  function _require_derivation_inputs (line 281) | def _require_derivation_inputs(drv_path, drv_info):
  function normalize_nix_path_info (line 310) | def normalize_nix_path_info(path_info, *, command=NIX_PATH_INFO_JSON):
  function nix_path_info_references (line 351) | def nix_path_info_references(info, path, *, command=NIX_PATH_INFO_JSON):
  function nix_path_info_deriver (line 374) | def nix_path_info_deriver(info, path, *, command=NIX_PATH_INFO_JSON):
  function nix_path_info_nar_hash (line 387) | def nix_path_info_nar_hash(info, path, *, command=NIX_PATH_INFO_JSON):
  function _validated_path_keys (line 398) | def _validated_path_keys(paths, owner, command):
  function _validated_path_values (line 410) | def _validated_path_values(paths, owner, command):

FILE: src/common/package_names.py
  function nix_to_repology_pkg_name (line 12) | def nix_to_repology_pkg_name(nix_pkg_name):

FILE: src/common/pkgmeta.py
  function get_py_pkg_version (line 14) | def get_py_pkg_version(package="sbomnix"):
  function _dev_version (line 22) | def _dev_version():

FILE: src/common/proc.py
  function exec_cmd (line 24) | def exec_cmd(
  function exec_cmd (line 34) | def exec_cmd(
  function exec_cmd (line 44) | def exec_cmd(
  function exec_cmd (line 53) | def exec_cmd(
  function exit_unless_command_exists (line 92) | def exit_unless_command_exists(
  function exit_unless_nix_artifact (line 104) | def exit_unless_nix_artifact(
  function nix_cmd (line 129) | def nix_cmd(*args: str, impure: bool = False) -> list[str]:

FILE: src/common/regex.py
  function regex_match (line 10) | def regex_match(regex, string):

FILE: src/common/spdx.py
  function _spdx_licensing (line 13) | def _spdx_licensing():
  function canonicalize_spdx_license_id (line 17) | def canonicalize_spdx_license_id(identifier):

FILE: src/common/versioning.py
  function number_distance (line 14) | def number_distance(n1: object, n2: object) -> float:
  function version_distance (line 38) | def version_distance(v1: object, v2: object) -> float:
  function parse_version (line 65) | def parse_version(ver_str: object) -> packaging.version.Version | None:

FILE: src/nixgraph/graph.py
  class LoadedDependencies (line 23) | class LoadedDependencies:
  function load_dependencies (line 31) | def load_dependencies(nix_path, buildtime=False):
  function draw_dependencies (line 56) | def draw_dependencies(loaded, args):

FILE: src/nixgraph/main.py
  function getargs (line 20) | def getargs(args=None):
  function main (line 80) | def main():
  function _run (line 91) | def _run(args):

FILE: src/nixgraph/render.py
  class NixDependencyGraph (line 26) | class NixDependencyGraph:
    method __init__ (line 29) | def __init__(self, df_dependencies):
    method draw (line 39) | def draw(self, start_path, args):
    method _walk_rows (line 75) | def _walk_rows(self, start_path):
    method _walked_rows_to_dataframe (line 96) | def _walked_rows_to_dataframe(self, walked_rows):
    method _draw_row (line 102) | def _draw_row(self, row, depth):
    method _init_df_out (line 111) | def _init_df_out(self, args):
    method _render (line 122) | def _render(self, filename):
    method _warn_if_large_graphviz_render (line 136) | def _warn_if_large_graphviz_render(self, filename, edge_count):
    method _matches_until (line 148) | def _matches_until(self, row):
    method _add_edge (line 151) | def _add_edge(self, row):
    method _add_node (line 158) | def _add_node(self, path, pname):
    method _dbg_print_row (line 179) | def _dbg_print_row(self, row: dict[str, Any], depth):

FILE: src/nixmeta/flake_metadata.py
  function get_flake_metadata (line 15) | def get_flake_metadata(flakeref, *, exec_cmd_fn=exec_cmd, nix_cmd_fn=nix...
  function is_nixpkgs_metadata (line 30) | def is_nixpkgs_metadata(meta_json):
  function _locked_obj_is_nixpkgs (line 51) | def _locked_obj_is_nixpkgs(node_name, locked_obj):
  function _input_node_names (line 62) | def _input_node_names(value):
  function _get_flake_nixpkgs_obj (line 71) | def _get_flake_nixpkgs_obj(meta_json):
  function _get_flake_nixpkgs_val (line 98) | def _get_flake_nixpkgs_val(meta_json, key):
  function _get_nixpkgs_flakeref_github (line 108) | def _get_nixpkgs_flakeref_github(meta_json, *, log=LOG):
  function _get_nixpkgs_flakeref_git (line 121) | def _get_nixpkgs_flakeref_git(meta_json, *, log=LOG):
  function _get_nixpkgs_flakeref_path (line 131) | def _get_nixpkgs_flakeref_path(meta_json, *, log=LOG):
  function _get_nixpkgs_flakeref_tarball (line 139) | def _get_nixpkgs_flakeref_tarball(meta_json, *, log=LOG):
  function get_nixpkgs_flakeref (line 147) | def get_nixpkgs_flakeref(meta_json, *, log=LOG):
  function nixref_to_nixpkgs_path (line 162) | def nixref_to_nixpkgs_path(

FILE: src/nixmeta/main.py
  function _getargs (line 20) | def _getargs(args=None):
  function main (line 72) | def main():
  function _run (line 83) | def _run(args):

FILE: src/nixmeta/metadata_json.py
  function parse_meta_entry (line 15) | def parse_meta_entry(meta, key):
  function parse_json_metadata (line 27) | def parse_json_metadata(json_filename, *, log=LOG):

FILE: src/nixmeta/scanner.py
  function _run_nix_env_metadata (line 23) | def _run_nix_env_metadata(cmd, stdout):
  class NixMetaScanner (line 36) | class NixMetaScanner:
    method __init__ (line 39) | def __init__(self):
    method scan (line 42) | def scan(self, nixref):
    method scan_path (line 71) | def scan_path(self, nixpkgs_path):
    method scan_expression (line 80) | def scan_expression(self, expression, *, impure=False):
    method to_csv (line 99) | def to_csv(self, csv_path, append=False):
    method to_df (line 112) | def to_df(self):
    method _read_nixpkgs_meta (line 116) | def _read_nixpkgs_meta(
    method _drop_duplicates (line 146) | def _drop_duplicates(self):

FILE: src/nixupdate/nix_outdated.py
  function getargs (line 32) | def getargs(args=None):
  function _query_repology (line 80) | def _query_repology(sbompath):
  function _run_nix_visualize (line 84) | def _run_nix_visualize(target_path):
  function _nix_visualize_csv_to_df (line 93) | def _nix_visualize_csv_to_df(csvpath):
  function _generate_report_df (line 98) | def _generate_report_df(df_nv, df_repo):
  function _drop_newest_dups (line 102) | def _drop_newest_dups(df_con, df_cmp):
  function _report (line 106) | def _report(df, args):
  function _console_out_table (line 110) | def _console_out_table(table, local=False, buildtime=False):
  function main (line 117) | def main():
  function _run (line 128) | def _run(args):

FILE: src/nixupdate/nix_visualize.py
  function run_nix_visualize (line 17) | def run_nix_visualize(
  function nix_visualize_csv_to_df (line 36) | def nix_visualize_csv_to_df(csvpath):

FILE: src/nixupdate/pipeline.py
  class OutdatedScanData (line 23) | class OutdatedScanData:
  function query_repology (line 30) | def query_repology(sbompath, *, adapter=None, log=LOG):
  class OutdatedScanHooks (line 44) | class OutdatedScanHooks:
  function collect_outdated_scan_data (line 53) | def collect_outdated_scan_data(

FILE: src/nixupdate/report.py
  function generate_report_df (line 16) | def generate_report_df(df_nix_visualize, df_repology, *, log=LOG, log_sp...
  function drop_newest_duplicates (line 38) | def drop_newest_duplicates(df_console, df_compare, *, log=LOG):
  function console_out_table (line 53) | def console_out_table(table, *, local=False, buildtime=False, log=LOG):
  function write_report (line 72) | def write_report(df, args, *, log=LOG):

FILE: src/provenance/dependencies.py
  class DependencyHooks (line 28) | class DependencyHooks:
  function derivation_outputs_by_path (line 40) | def derivation_outputs_by_path(infos, hooks=None):
  function dependency_paths (line 58) | def dependency_paths(drv_path, recursive=False, outputs_by_path=None, ho...
  function dependency_package (line 84) | def dependency_package(drv, output_hash, infos, outputs_by_path, hooks=N...
  function get_dependencies (line 111) | def get_dependencies(drv_path, recursive=False, hooks=None):

FILE: src/provenance/digests.py
  function canonical_hash_algo (line 22) | def canonical_hash_algo(hash_algo):
  function hash_size_bytes (line 29) | def hash_size_bytes(hash_algo):
  function decode_nix32 (line 37) | def decode_nix32(hash_value, size_bytes):
  function decode_hash_bytes (line 54) | def decode_hash_bytes(hash_value, hash_algo):
  function split_hash_value (line 78) | def split_hash_value(hash_value, hash_algo=None):
  function normalize_digest (line 102) | def normalize_digest(hash_value, hash_algo=None):
  function output_digest (line 120) | def output_digest(data, *, normalize_digest_fn=normalize_digest):

FILE: src/provenance/main.py
  class BuildMeta (line 32) | class BuildMeta:
  function get_env_metadata (line 44) | def get_env_metadata():
  function provenance (line 67) | def provenance(target: str, metadata: BuildMeta, recursive: bool = False...
  function getargs (line 108) | def getargs(args=None):
  function main (line 136) | def main():

FILE: src/provenance/nix_commands.py
  function exec_required_nix_command (line 12) | def exec_required_nix_command(cmd, exec_cmd_fn):

FILE: src/provenance/path_info.py
  function query_path_info (line 20) | def query_path_info(
  function query_path_hashes (line 52) | def query_path_hashes(paths, *, exec_cmd_fn=exec_cmd):
  function nar_hash_for_path (line 75) | def nar_hash_for_path(path_infos, path):

FILE: src/provenance/schema.py
  class ProvenanceMetadata (line 25) | class ProvenanceMetadata(Protocol):
  function get_external_parameters (line 37) | def get_external_parameters(metadata: ProvenanceMetadata) -> JsonDict:
  function get_internal_parameters (line 43) | def get_internal_parameters(metadata: ProvenanceMetadata) -> JsonDict:
  function timestamp (line 49) | def timestamp(unix_time: str) -> str:
  class SchemaHooks (line 63) | class SchemaHooks:
  function provenance_document (line 77) | def provenance_document(

FILE: src/provenance/subjects.py
  function output_path (line 24) | def output_path(
  class SubjectHooks (line 37) | class SubjectHooks:
  function get_subjects (line 47) | def get_subjects(

FILE: src/repology/adapter.py
  class RepologyQuery (line 37) | class RepologyQuery:
    method __post_init__ (line 49) | def __post_init__(self):
  function repo_row_classify (line 66) | def repo_row_classify(row):
  class RepologyAdapter (line 73) | class RepologyAdapter:
    method __init__ (line 76) | def __init__(self, session=None, request_timeout=REPOLOGY_REQUEST_TIME...
    method _reset_state (line 82) | def _reset_state(self):
    method _packages_to_df (line 89) | def _packages_to_df(self, query, re_pkg_internal=None):
    method _append_package_rows (line 114) | def _append_package_rows(self, package_rows):
    method _get_resp (line 119) | def _get_resp(self, url):
    method query_cves (line 129) | def query_cves(self, pkg_name, pkg_version):
    method _query_pkg_search (line 143) | def _query_pkg_search(self, pkg_search, repository, stop_pkg=None):
    method _query_pkg_exact (line 170) | def _query_pkg_exact(self, pkg_name, repository):
    method _query_sbom_cdx (line 173) | def _query_sbom_cdx(self, query):
    method query (line 231) | def query(self, query):

FILE: src/repology/cves.py
  function is_affected (line 19) | def is_affected(version, affected_ver_str, *, log=LOG, log_spam=LOG_SPAM):
  function parse_cve_html (line 71) | def parse_cve_html(html_text, pkg_name, pkg_version, *, log=LOG, log_spa...

FILE: src/repology/exceptions.py
  class RepologyError (line 8) | class RepologyError(Exception):
  class RepologyNoMatchingPackages (line 14) | class RepologyNoMatchingPackages(RepologyError):
  class RepologyUnexpectedResponse (line 20) | class RepologyUnexpectedResponse(RepologyError):

FILE: src/repology/projects_parser.py
  class ParsedProjectsPage (line 18) | class ParsedProjectsPage:
  function parse_projects_search_html (line 26) | def parse_projects_search_html(  # noqa: PLR0912, PLR0914, PLR0915

FILE: src/repology/repology_cli.py
  function _pkg_str (line 22) | def _pkg_str(str_obj):
  function getargs (line 28) | def getargs(args=None):
  function _query_from_args (line 86) | def _query_from_args(args):
  class Repology (line 99) | class Repology:
    method __init__ (line 102) | def __init__(self, adapter=None):
    method query (line 108) | def query(self, args, stdout_report=True, file_report=True):
  function main (line 129) | def main():

FILE: src/repology/repology_cve.py
  function _pkg_str (line 22) | def _pkg_str(str_obj):
  function getargs (line 28) | def getargs(args=None):
  function query_cve (line 51) | def query_cve(
  function main (line 65) | def main():

FILE: src/repology/reporting.py
  function _stats_sbom (line 14) | def _stats_sbom(df, *, log=LOG):  # noqa: PLR0914
  function _stats_repology (line 58) | def _stats_repology(df, *, log=LOG):  # noqa: PLR0914
  function report_cves (line 104) | def report_cves(df, *, log=LOG):
  function write_query_report (line 120) | def write_query_report(  # noqa: PLR0913

FILE: src/repology/sbom.py
  function parse_cdx_sbom (line 34) | def parse_cdx_sbom(path):
  function is_ignored_sbom_package (line 58) | def is_ignored_sbom_package(package_name):
  function make_sbom_status_row (line 63) | def make_sbom_status_row(repository, package, version, status):
  function merge_sbom_fields (line 75) | def merge_sbom_fields(df_sbom, df_repo):
  function sbom_row_classify (line 90) | def sbom_row_classify(row):

FILE: src/repology/session.py
  function create_repology_session (line 14) | def create_repology_session():

FILE: src/sbomnix/builder.py
  class StructuredClosure (line 53) | class StructuredClosure:
  function _runtime_output_paths_by_load_path (line 61) | def _runtime_output_paths_by_load_path(output_paths_by_drv):
  function _mapped_runtime_output_paths (line 72) | def _mapped_runtime_output_paths(output_paths_by_load_path):
  class SbomBuilder (line 78) | class SbomBuilder:
    method __init__ (line 81) | def __init__(  # noqa: PLR0913, PLR0917
    method _resolve_target_deriver (line 138) | def _resolve_target_deriver(self, nix_path):
    method _load_structured_closure (line 153) | def _load_structured_closure(self, nix_path):
    method _init_dependencies (line 161) | def _init_dependencies(self, closure):
    method _load_recursive_buildtime_closure (line 169) | def _load_recursive_buildtime_closure(self):
    method _load_runtime_path_info_closure (line 186) | def _load_runtime_path_info_closure(self, nix_path):
    method _init_runtime_components (line 215) | def _init_runtime_components(self, paths):
    method _filter_dependencies_to_depth (line 227) | def _filter_dependencies_to_depth(
    method _init_components (line 238) | def _init_components(self, include_meta):
    method _sbom_component_paths (line 263) | def _sbom_component_paths(self):
    method _resolve_target_component_ref (line 274) | def _resolve_target_component_ref(self) -> str:
    method _init_dependency_index (line 299) | def _init_dependency_index(self):
    method _join_meta (line 308) | def _join_meta(self):
    method lookup_dependencies (line 346) | def lookup_dependencies(self, drv, uid=cols.STORE_PATH):
    method to_cdx_data (line 353) | def to_cdx_data(self):
    method enrich_cdx_with_vulnerabilities (line 357) | def enrich_cdx_with_vulnerabilities(self, cdx):
    method to_spdx_data (line 361) | def to_spdx_data(self):
    method write_json (line 365) | def write_json(self, pathname, data, printinfo=False):
    method to_cdx (line 369) | def to_cdx(self, cdx_path, printinfo=True):
    method to_spdx (line 374) | def to_spdx(self, spdx_path, printinfo=True):
    method to_csv (line 379) | def to_csv(self, csv_path, loglevel=logging.INFO):

FILE: src/sbomnix/cdx.py
  function _drv_to_cdx_licenses_entry (line 17) | def _drv_to_cdx_licenses_entry(drv, column_name, cdx_license_type):
  function _cdx_component_add_licenses (line 44) | def _cdx_component_add_licenses(component, drv):
  function _cdx_component_add_patches (line 60) | def _cdx_component_add_patches(component, drv):
  function _drv_to_cdx_component (line 88) | def _drv_to_cdx_component(drv, uid=cols.STORE_PATH):
  function _drv_to_cdx_dependency (line 142) | def _drv_to_cdx_dependency(drv, deps_list, uid=cols.STORE_PATH):
  function _vuln_to_cdx_vuln (line 151) | def _vuln_to_cdx_vuln(vuln):

FILE: src/sbomnix/cli_utils.py
  class ResolvedNixTarget (line 28) | class ResolvedNixTarget:
  class GeneratedSbom (line 37) | class GeneratedSbom:
    method cleanup (line 43) | def cleanup(self):
  function resolve_nix_target (line 50) | def resolve_nix_target(nixref, buildtime=False, impure=False):
  function _realise_derivation_output (line 75) | def _realise_derivation_output(path):
  function _normalize_nixos_configuration_ref (line 96) | def _normalize_nixos_configuration_ref(nixref):
  function generate_temp_sbom (line 105) | def generate_temp_sbom(

FILE: src/sbomnix/closure.py
  class DependencyWalkRow (line 29) | class DependencyWalkRow:
  function dependency_paths (line 36) | def dependency_paths(df_deps):
  function dependencies_to_depth (line 45) | def dependencies_to_depth(df_deps, start_path, depth, columns=DEPENDENCY...
  function walk_dependency_rows (line 53) | def walk_dependency_rows(
  function _dependency_rows_by_path (line 95) | def _dependency_rows_by_path(df_deps, match_column):
  function derivation_dependencies_df (line 103) | def derivation_dependencies_df(drv_infos):
  function dependency_rows_to_dataframe (line 119) | def dependency_rows_to_dataframe(rows, columns=DEPENDENCY_COLUMNS):
  function store_path_label (line 131) | def store_path_label(path):
  function _iter_input_paths (line 138) | def _iter_input_paths(drv_info, target_path=None):

FILE: src/sbomnix/components.py
  function recursive_derivations_to_dataframe (line 16) | def recursive_derivations_to_dataframe(paths, derivations, include_cpe=T...
  function runtime_derivations_to_dataframe (line 28) | def runtime_derivations_to_dataframe(
  function derivations_to_dataframe (line 44) | def derivations_to_dataframe(derivations, include_cpe=True):
  function filter_runtime_outputs_by_load_path (line 54) | def filter_runtime_outputs_by_load_path(paths, output_paths_by_load_path):

FILE: src/sbomnix/cpe.py
  class CPE (line 24) | class CPE:
    method __init__ (line 27) | def __init__(
    method _init_product_vendor_index (line 59) | def _init_product_vendor_index(self):
    method _cpedict_vendor (line 75) | def _cpedict_vendor(self, product):
    method _candidate_vendor (line 99) | def _candidate_vendor(self, product):
    method generate (line 121) | def generate(self, name, version):

FILE: src/sbomnix/dependency_index.py
  function _sorted_unique (line 16) | def _sorted_unique(values):
  function _normalize_outputs (line 20) | def _normalize_outputs(outputs):
  function _group_dependency_rows (line 28) | def _group_dependency_rows(df, dep_col):
  class DependencyIndex (line 38) | class DependencyIndex:
    method lookup (line 45) | def lookup(self, drv, uid=cols.STORE_PATH):
    method _get_uid_map (line 67) | def _get_uid_map(self, uid):
  function build_dependency_index (line 82) | def build_dependency_index(df_deps, df_sbomdb, df_sbomdb_outputs_explode...

FILE: src/sbomnix/derivation.py
  function _batched (line 24) | def _batched(iterable, size):
  function load (line 30) | def load(path, outpath):
  function load_many (line 54) | def load_many(paths, output_paths_by_drv=None, batch_size=200, ignore_mi...
  function _load_derivation_infos (line 105) | def _load_derivation_infos(paths, store_path_hint=None, ignore_missing=F...
  function _query_paths_to_derivations (line 132) | def _query_paths_to_derivations(query_paths, drv_infos):
  function _derivation_output_paths (line 149) | def _derivation_output_paths(drv_info):
  function load_recursive (line 175) | def load_recursive(path):
  function _exec_required_nix_command (line 196) | def _exec_required_nix_command(cmd):
  function destructure (line 207) | def destructure(env):
  class Derive (line 214) | class Derive:
    method __init__ (line 217) | def __init__(
    method from_nix_derivation_info (line 268) | def from_nix_derivation_info(cls, path, drv_info, outpath=None):
    method init (line 293) | def init(self, path, outpath):
    method __repr__ (line 302) | def __repr__(self):
    method set_cpe (line 305) | def set_cpe(self, cpe_generator):
    method add_output_path (line 310) | def add_output_path(self, path):
    method _refresh_purl (line 316) | def _refresh_purl(self):
    method to_dict (line 323) | def to_dict(self):
  function _derivation_output_path (line 331) | def _derivation_output_path(outputs, output_name):
  function _coerce_derivation_string (line 340) | def _coerce_derivation_string(value):
  function _set_derivation_output_paths (line 346) | def _set_derivation_output_paths(drv, outputs, env_vars):

FILE: src/sbomnix/derivers.py
  function is_loadable_deriver_path (line 17) | def is_loadable_deriver_path(path):
  function find_deriver (line 27) | def find_deriver(path):
  function require_deriver (line 59) | def require_deriver(path, *, find_deriver_fn=find_deriver, log=LOG):

FILE: src/sbomnix/dfcache.py
  class LockedDfCache (line 23) | class LockedDfCache:
    method __init__ (line 26) | def __init__(self):
    method __getattr__ (line 29) | def __getattr__(self, name):

FILE: src/sbomnix/exporters.py
  function write_json (line 29) | def write_json(pathname, data, printinfo=False):
  function _nixpkgs_meta_source_properties (line 38) | def _nixpkgs_meta_source_properties(sbomdb):
  function _spdx_nixpkgs_meta_source_comment (line 51) | def _spdx_nixpkgs_meta_source_comment(sbomdb):
  function build_cdx_document (line 66) | def build_cdx_document(sbomdb):
  function _str_to_spdxid (line 106) | def _str_to_spdxid(strval):
  function _drv_to_spdx_license_list (line 116) | def _drv_to_spdx_license_list(drv):
  function _drv_to_spdx_extrefs (line 133) | def _drv_to_spdx_extrefs(drv):
  function _drv_to_spdx_package (line 150) | def _drv_to_spdx_package(drv, uid=cols.STORE_PATH):
  function _drv_to_spdx_relationships (line 176) | def _drv_to_spdx_relationships(drv, deps_list, uid=cols.STORE_PATH):
  function build_spdx_document (line 192) | def build_spdx_document(sbomdb):

FILE: src/sbomnix/main.py
  function getargs (line 20) | def getargs(args=None):
  function main (line 80) | def main():
  function _run (line 91) | def _run(args):

FILE: src/sbomnix/meta.py
  class Meta (line 44) | class Meta:
    method __init__ (line 47) | def __init__(self):
    method get_nixpkgs_meta (line 52) | def get_nixpkgs_meta(self, nixref=None):
    method get_nixpkgs_meta_with_source (line 61) | def get_nixpkgs_meta_with_source(
    method _resolve_source (line 80) | def _resolve_source(
    method _scan_source (line 108) | def _scan_source(self, source):
    method _scan_source_with_source (line 112) | def _scan_source_with_source(self, source):
    method _scan_expression (line 138) | def _scan_expression(self, expression, *, cache_key=None, impure=False):
    method _try_scan_expression (line 162) | def _try_scan_expression(expression, *, impure=False):
    method _scan (line 171) | def _scan(self, nixpkgs_path):

FILE: src/sbomnix/meta_source.py
  class NixpkgsMetaSource (line 34) | class NixpkgsMetaSource:
  function classify_meta_nixpkgs (line 48) | def classify_meta_nixpkgs(value):
  function read_nixpkgs_version (line 55) | def read_nixpkgs_version(nixpkgs_path):
  function is_nix_store_path (line 67) | def is_nix_store_path(path):
  function nixpkgs_meta_source_with_path (line 72) | def nixpkgs_meta_source_with_path(source):
  class NixpkgsMetaSourceResolver (line 79) | class NixpkgsMetaSourceResolver:
    method path_target_without_source (line 83) | def path_target_without_source(target_path=None, original_ref=None):
    method resolve_meta_nixpkgs_option (line 99) | def resolve_meta_nixpkgs_option(self, meta_nixpkgs, *, target_path=None):
    method resolve_flakeref_target_source (line 113) | def resolve_flakeref_target_source(self, flakeref, *, impure=False):
    method _nixos_toplevel_without_source (line 146) | def _nixos_toplevel_without_source():
    method _parse_nixos_toplevel_flakeref (line 157) | def _parse_nixos_toplevel_flakeref(flakeref):
    method _nixos_pkgs_expression (line 164) | def _nixos_pkgs_expression(flake, name):
    method _flake_ref_for_expression (line 174) | def _flake_ref_for_expression(self, flake, *, impure=False):
    method _flake_ref_has_stable_lock (line 184) | def _flake_ref_has_stable_lock(flake):
    method _should_lock_flake_ref_for_expression (line 188) | def _should_lock_flake_ref_for_expression(cls, flake):
    method _is_existing_local_flake_ref (line 196) | def _is_existing_local_flake_ref(flake):
    method _locked_flake_ref_from_metadata (line 205) | def _locked_flake_ref_from_metadata(flake, *, impure=False):
    method _nix_flake_metadata (line 227) | def _nix_flake_metadata(flake, *, impure=False):
    method _normalize_local_flake_ref_for_expression (line 245) | def _normalize_local_flake_ref_for_expression(flake):
    method _nixos_pkgs_expression_cache_key (line 260) | def _nixos_pkgs_expression_cache_key(cls, flake, name, *, impure=False):
    method _stable_flake_ref_for_expression_cache (line 270) | def _stable_flake_ref_for_expression_cache(flake):
    method _nix_eval_raw (line 280) | def _nix_eval_raw(flakeref, *, impure=False):
    method resolve_explicit_source (line 295) | def resolve_explicit_source(self, meta_nixpkgs):
    method _try_normalize_mutable_path (line 339) | def _try_normalize_mutable_path(path):
    method resolve_flakeref_lock_source (line 353) | def resolve_flakeref_lock_source(self, nixref):
    method resolve_default_source (line 368) | def resolve_default_source(self, nixref=None):
    method resolve_nix_path_source (line 376) | def resolve_nix_path_source(self, *, message=None, required=False):

FILE: src/sbomnix/runtime.py
  class RuntimeClosure (line 31) | class RuntimeClosure:
  function load_runtime_closure (line 38) | def load_runtime_closure(path):
  function runtime_closure_from_path_info (line 59) | def runtime_closure_from_path_info(path_info):

FILE: src/sbomnix/vuln_enrichment.py
  function enrich_cdx_with_vulnerabilities (line 18) | def enrich_cdx_with_vulnerabilities(sbomdb, cdx):
  function _vulnix_target_path (line 69) | def _vulnix_target_path(sbomdb):

FILE: src/vulnxscan/github_prs.py
  function append_search_results (line 21) | def append_search_results(prs, result, max_results=5):
  class GitHubPrLookup (line 36) | class GitHubPrLookup:
    method __init__ (line 39) | def __init__(
    method query (line 58) | def query(self, query_str, delay=60):
    method find_nixpkgs_prs (line 78) | def find_nixpkgs_prs(self, row):

FILE: src/vulnxscan/osv.py
  function getargs (line 22) | def getargs(args=None):
  function _run (line 41) | def _run(args):
  function main (line 51) | def main():

FILE: src/vulnxscan/osv_client.py
  function create_osv_session (line 21) | def create_osv_session():
  class OSV (line 31) | class OSV:
    method __init__ (line 34) | def __init__(self, session=None, request_timeout=OSV_REQUEST_TIMEOUT):
    method _parse_vulns (line 39) | def _parse_vulns(self, package, vulns):
    method _parse_batch_response (line 47) | def _parse_batch_response(self, query, results):
    method _post_batch_query (line 58) | def _post_batch_query(self, query):
    method _parse_sbom (line 72) | def _parse_sbom(self, path):
    method query_vulns (line 93) | def query_vulns(self, sbom_path, ecosystems=None):
    method to_dataframe (line 122) | def to_dataframe(self):

FILE: src/vulnxscan/parsers.py
  function _severity_from_cache (line 18) | def _severity_from_cache(cvss_cache, vuln_id):
  function parse_vulnix_json (line 24) | def parse_vulnix_json(json_str, *, cvss_cache=None, log=LOG):
  function parse_grype_json (line 50) | def parse_grype_json(json_str, *, cvss_cache=None, log=LOG, log_spam=LOG...
  function normalize_osv_dataframe (line 87) | def normalize_osv_dataframe(df_osv, *, cvss_cache=None, log=LOG, log_spa...

FILE: src/vulnxscan/repology_lookup.py
  function select_newest (line 23) | def select_newest(df):
  function _add_triage_item (line 37) | def _add_triage_item(out_dict, vuln, whitelist_cols, df_repo=None):
  function _version_similarity (line 75) | def _version_similarity(row):
  class RepologyVulnerabilityLookup (line 87) | class RepologyVulnerabilityLookup:
    method __init__ (line 90) | def __init__(self, adapter=None, cve_query=None):
    method is_vulnerable (line 96) | def is_vulnerable(self, repo_pkg_name, pkg_version, cve_id=None):
    method query_repology (line 116) | def query_repology(self, pname, match_type="pkg_exact"):
    method query_repology_versions (line 155) | def query_repology_versions(self, df_vuln_pkgs):

FILE: src/vulnxscan/reporting.py
  function build_report_dataframe (line 22) | def build_report_dataframe(df_vulnix, df_grype, df_osv, *, log=LOG):
  function filter_patched_report (line 68) | def filter_patched_report(df_report, sbom_csv, *, log=LOG):
  function apply_whitelist_annotations (line 86) | def apply_whitelist_annotations(df_report, whitelist_csv):
  function render_console_report (line 96) | def render_console_report(df_report, *, df_triaged=None, log=LOG):
  function write_reports (line 128) | def write_reports(df_report, out_path, *, df_triaged=None):

FILE: src/vulnxscan/scanners.py
  function run_vulnix_scan (line 14) | def run_vulnix_scan(target_path, buildtime=False, *, exec_cmd_fn=exec_cm...
  function run_grype_scan (line 29) | def run_grype_scan(sbom_path, *, exec_cmd_fn=exec_cmd, log=LOG):
  function run_osv_scan (line 36) | def run_osv_scan(sbom_path, *, osv_factory=OSV, log=LOG):

FILE: src/vulnxscan/triage.py
  function _get_default_repology_lookup (line 20) | def _get_default_repology_lookup():
  function _get_default_github_lookup (line 27) | def _get_default_github_lookup():
  function classify_vulnerability (line 34) | def classify_vulnerability(row, repology_lookup=None):  # noqa: PLR0911
  function triage_vulnerabilities (line 65) | def triage_vulnerabilities(

FILE: src/vulnxscan/utils.py
  function _reformat_scanner (line 19) | def _reformat_scanner(val):
  function _vuln_sortcol (line 25) | def _vuln_sortcol(row):
  function _vuln_url (line 39) | def _vuln_url(row):
  function _vuln_source (line 49) | def _vuln_source(row):
  function _is_patched (line 57) | def _is_patched(row):
  function _is_json (line 66) | def _is_json(path):

FILE: src/vulnxscan/vulnscan.py
  class VulnScan (line 25) | class VulnScan:
    method __init__ (line 28) | def __init__(self):
    method _parse_vulnix (line 37) | def _parse_vulnix(self, json_str):
    method scan_vulnix (line 47) | def scan_vulnix(self, target_path, buildtime=False):
    method _parse_grype (line 63) | def _parse_grype(self, json_str):
    method scan_grype (line 74) | def scan_grype(self, sbom_path):
    method _parse_osv (line 84) | def _parse_osv(self, df_osv):
    method scan_osv (line 95) | def scan_osv(self, sbom_path):
    method _generate_report (line 100) | def _generate_report(self):
    method _filter_patched (line 126) | def _filter_patched(self, sbom_csv):
    method _apply_whitelist (line 133) | def _apply_whitelist(self, whitelist_csv):
    method _console_report (line 136) | def _console_report(self):
    method report (line 143) | def report(self, args, sbom_csv):

FILE: src/vulnxscan/vulnxscan_cli.py
  function getargs (line 27) | def getargs(args=None):
  function main (line 95) | def main():
  function _run (line 106) | def _run(args):

FILE: src/vulnxscan/whitelist.py
  function load_whitelist (line 21) | def load_whitelist(whitelist_csv_path):
  function df_apply_whitelist (line 45) | def df_apply_whitelist(df_whitelist, df_vulns):
  function df_drop_whitelisted (line 93) | def df_drop_whitelisted(df):

FILE: tests/compare_deps.py
  function getargs (line 25) | def getargs():
  function _parse_sbom (line 44) | def _parse_sbom(path):
  function _parse_graph (line 101) | def _parse_graph(path):
  function _filter_set (line 111) | def _filter_set(re_filter_out_list, target_set):
  function sbom_internal_checks (line 124) | def sbom_internal_checks(df_sbom):
  function compare_dependencies (line 144) | def compare_dependencies(df_sbom, df_graph, sbom_type, graph_type):
  function main (line 256) | def main():

FILE: tests/compare_sboms.py
  function getargs (line 24) | def getargs():
  function _sbom_df_from_dict (line 48) | def _sbom_df_from_dict(dict_obj):
  function _parse_sbom_cdx (line 57) | def _parse_sbom_cdx(json_dict):
  function _parse_sbom_spdx (line 68) | def _parse_sbom_spdx(json_dict):
  function _parse_sbom (line 79) | def _parse_sbom(path):
  function _log_rows (line 93) | def _log_rows(df, name):
  function _compare_sboms (line 98) | def _compare_sboms(args, df1, df2):
  function main (line 160) | def main():

FILE: tests/conftest.py
  function _output_mentions_repology_host (line 27) | def _output_mentions_repology_host(output):
  function _pythonpath_with_repo_root (line 36) | def _pythonpath_with_repo_root(env):
  function _warm_grype_db (line 49) | def _warm_grype_db(request, tmp_path_factory):
  function _configure_test_vulnix (line 79) | def _configure_test_vulnix(request, tmp_path_factory):
  function fixture_test_work_dir (line 115) | def fixture_test_work_dir(tmp_path):
  function fixture_test_nix_drv (line 121) | def fixture_test_nix_drv():
  function fixture_test_nix_result (line 136) | def fixture_test_nix_result(test_nix_drv, tmp_path_factory):
  function fixture_test_cdx_sbom (line 147) | def fixture_test_cdx_sbom():
  function fixture_run_python_script (line 155) | def fixture_run_python_script(test_work_dir, _warm_grype_db, _configure_...
  function fixture_run_python_script_retry_on_repology_network_error (line 176) | def fixture_run_python_script_retry_on_repology_network_error(_run_pytho...
  function pytest_collection_modifyitems (line 216) | def pytest_collection_modifyitems(items):

FILE: tests/integration/test_nixgraph_cli.py
  function _write_nixgraph_test_flake (line 17) | def _write_nixgraph_test_flake(flake_dir):
  function test_nixgraph_help (line 70) | def test_nixgraph_help(_run_python_script):
  function test_nixgraph_png (line 75) | def test_nixgraph_png(_run_python_script, test_nix_result, test_work_dir):
  function test_nixgraph_csv (line 82) | def test_nixgraph_csv(_run_python_script, test_nix_result, test_work_dir):
  function test_nixgraph_csv_runtime_drv (line 91) | def test_nixgraph_csv_runtime_drv(_run_python_script, test_nix_drv, test...
  function test_nixgraph_csv_buildtime (line 104) | def test_nixgraph_csv_buildtime(_run_python_script, test_nix_drv, test_w...
  function test_nixgraph_dot_includes_edges_labels_and_style (line 113) | def test_nixgraph_dot_includes_edges_labels_and_style(
  function test_nixgraph_depth_and_until_limit_traversal (line 139) | def test_nixgraph_depth_and_until_limit_traversal(
  function test_nixgraph_csv_runtime_flakeref (line 174) | def test_nixgraph_csv_runtime_flakeref(_run_python_script, test_work_dir):
  function test_nixgraph_csv_buildtime_flakeref (line 187) | def test_nixgraph_csv_buildtime_flakeref(_run_python_script, test_work_d...
  function test_nixgraph_csv_graph_inverse (line 202) | def test_nixgraph_csv_graph_inverse(_run_python_script, test_nix_result,...
  function test_compare_deps_runtime (line 239) | def test_compare_deps_runtime(_run_python_script, test_nix_result, test_...
  function test_compare_deps_buildtime (line 276) | def test_compare_deps_buildtime(_run_python_script, test_nix_drv, test_w...

FILE: tests/integration/test_nixmeta_cli.py
  function test_nixmeta_help (line 14) | def test_nixmeta_help(_run_python_script):
  function test_nixmeta_sbomnix_flakeref (line 20) | def test_nixmeta_sbomnix_flakeref(_run_python_script, test_work_dir):

FILE: tests/integration/test_nixupdate_cli.py
  function test_nix_outdated_help (line 13) | def test_nix_outdated_help(_run_python_script):
  function test_nix_outdated_result (line 20) | def test_nix_outdated_result(

FILE: tests/integration/test_provenance_cli.py
  function test_provenance_help (line 12) | def test_provenance_help(_run_python_script):
  function test_provenance_schema (line 17) | def test_provenance_schema(_run_python_script, test_nix_drv, test_work_d...
  function test_provenance_schema_recursive (line 34) | def test_provenance_schema_recursive(_run_python_script, test_nix_drv, t...

FILE: tests/integration/test_repology_cli.py
  function test_repology_cli_help (line 13) | def test_repology_cli_help(_run_python_script):
  function test_repology_cli_sbom (line 20) | def test_repology_cli_sbom(

FILE: tests/integration/test_sbomnix_cli.py
  function test_sbomnix_help (line 15) | def test_sbomnix_help(_run_python_script):
  function test_sbomnix_type_runtime (line 20) | def test_sbomnix_type_runtime(_run_python_script, test_nix_result, test_...
  function test_sbomnix_type_buildtime (line 47) | def test_sbomnix_type_buildtime(_run_python_script, test_nix_drv, test_w...
  function test_sbomnix_depth (line 75) | def test_sbomnix_depth(_run_python_script, test_nix_drv, test_work_dir):
  function test_compare_subsequent_cdx_sboms (line 114) | def test_compare_subsequent_cdx_sboms(_run_python_script, test_nix_drv, ...
  function test_compare_subsequent_spdx_sboms (line 144) | def test_compare_subsequent_spdx_sboms(_run_python_script, test_nix_drv,...
  function test_compare_spdx_and_cdx_sboms (line 174) | def test_compare_spdx_and_cdx_sboms(_run_python_script, test_nix_drv, te...

FILE: tests/integration/test_vulnxscan_cli.py
  function test_vulnxscan_help (line 18) | def test_vulnxscan_help(_run_python_script):
  function test_vulnxscan_scan_nix_result (line 25) | def test_vulnxscan_scan_nix_result(_run_python_script, test_nix_result, ...
  function test_vulnxscan_scan_sbom (line 45) | def test_vulnxscan_scan_sbom(_run_python_script, test_cdx_sbom, test_wor...
  function test_vulnxscan_triage (line 61) | def test_vulnxscan_triage(_run_python_script, test_nix_result, test_work...
  function test_vulnxscan_triage_whitelist (line 81) | def test_vulnxscan_triage_whitelist(_run_python_script, test_nix_result,...

FILE: tests/resources/make_grype_test_db.py
  function build (line 95) | def build(db_path: Path) -> None:
  function main (line 130) | def main() -> None:

FILE: tests/test_builder_runtime.py
  function _builder_double (line 23) | def _builder_double():
  function _runtime_closure (line 36) | def _runtime_closure(output_paths_by_drv, rows=None):
  function test_runtime_path_info_dependencies_accepts_existing_derivers (line 43) | def test_runtime_path_info_dependencies_accepts_existing_derivers(monkey...
  function test_runtime_components_propagate_derivation_loading_failures (line 66) | def test_runtime_components_propagate_derivation_loading_failures(monkey...
  function test_runtime_components_reject_missing_derivation_metadata (line 85) | def test_runtime_components_reject_missing_derivation_metadata(monkeypat...
  function test_runtime_deriver_lookup_preserves_typed_errors (line 99) | def test_runtime_deriver_lookup_preserves_typed_errors(monkeypatch):
  function test_runtime_path_info_dependencies_uses_output_queries_for_unloadable_derivers (line 119) | def test_runtime_path_info_dependencies_uses_output_queries_for_unloadab...
  function test_runtime_path_info_dependencies_accepts_graph_only_references (line 145) | def test_runtime_path_info_dependencies_accepts_graph_only_references(mo...
  function test_runtime_path_info_dependencies_supports_targets_without_derivers (line 176) | def test_runtime_path_info_dependencies_supports_targets_without_derivers(
  function test_target_component_ref_uses_runtime_output_when_deriver_is_unavailable (line 196) | def test_target_component_ref_uses_runtime_output_when_deriver_is_unavai...
  function test_target_component_ref_skips_missing_outputs_when_deriver_is_unavailable (line 211) | def test_target_component_ref_skips_missing_outputs_when_deriver_is_unav...
  function test_target_component_ref_handles_non_identifier_output_column (line 230) | def test_target_component_ref_handles_non_identifier_output_column(monke...
  function test_target_component_ref_rejects_missing_runtime_target_metadata (line 246) | def test_target_component_ref_rejects_missing_runtime_target_metadata():

FILE: tests/test_buildtime_closure.py
  function test_recursive_buildtime_dependencies_df_reads_new_derivation_inputs (line 14) | def test_recursive_buildtime_dependencies_df_reads_new_derivation_inputs():
  function test_recursive_buildtime_dependencies_df_rejects_legacy_input_drvs (line 58) | def test_recursive_buildtime_dependencies_df_rejects_legacy_input_drvs():
  function test_recursive_buildtime_dependencies_df_rejects_missing_input_schema (line 71) | def test_recursive_buildtime_dependencies_df_rejects_missing_input_schem...
  function test_recursive_buildtime_dependencies_df_accepts_empty_modern_inputs (line 82) | def test_recursive_buildtime_dependencies_df_accepts_empty_modern_inputs():
  function test_recursive_buildtime_dependencies_df_rejects_missing_source_inputs (line 103) | def test_recursive_buildtime_dependencies_df_rejects_missing_source_inpu...
  function test_recursive_buildtime_dependencies_df_rejects_missing_derivation_inputs (line 116) | def test_recursive_buildtime_dependencies_df_rejects_missing_derivation_...

FILE: tests/test_cli_conventions.py
  function _stringify (line 25) | def _stringify(value):
  function test_cli_version_flags_exit_zero (line 61) | def test_cli_version_flags_exit_zero(getargs, capsys):
  function test_cli_verbose_default_is_normal_info (line 73) | def test_cli_verbose_default_is_normal_info(getargs, base_argv):
  function test_cli_verbose_level_one_forms_match (line 89) | def test_cli_verbose_level_one_forms_match(getargs, base_argv, verbose_a...
  function test_cli_verbose_level_two_forms_match (line 107) | def test_cli_verbose_level_two_forms_match(getargs, base_argv, verbose_a...
  function test_single_output_clis_accept_short_o_alias (line 143) | def test_single_output_clis_accept_short_o_alias(getargs, argv, expected...
  function test_dev_version_format_matches_nix_package_format (line 153) | def test_dev_version_format_matches_nix_package_format():
  function test_dev_version_parity_with_nix_package_version (line 197) | def test_dev_version_parity_with_nix_package_version():
  function test_repology_cli_uses_uppercase_v_for_version_filter (line 246) | def test_repology_cli_uses_uppercase_v_for_version_filter():

FILE: tests/test_cli_error_boundaries.py
  function test_vulnxscan_invalid_sbom_exits_nonzero (line 21) | def test_vulnxscan_invalid_sbom_exits_nonzero(tmp_path, monkeypatch):
  function test_osv_invalid_sbom_exits_nonzero (line 47) | def test_osv_invalid_sbom_exits_nonzero(tmp_path, monkeypatch):
  function test_cli_translates_sbomnix_errors_to_exit_code_1 (line 140) | def test_cli_translates_sbomnix_errors_to_exit_code_1(

FILE: tests/test_cli_smoke.py
  function test_repology_cve_main_writes_output_csv (line 17) | def test_repology_cve_main_writes_output_csv(tmp_path, monkeypatch):
  function test_osv_main_writes_output_csv_with_requested_ecosystems (line 63) | def test_osv_main_writes_output_csv_with_requested_ecosystems(tmp_path, ...

FILE: tests/test_common_log.py
  function test_set_log_verbosity_maps_cli_levels_to_logging_levels (line 26) | def test_set_log_verbosity_maps_cli_levels_to_logging_levels(verbosity, ...
  function test_custom_log_level_names_are_registered (line 34) | def test_custom_log_level_names_are_registered():
  function test_verbose_level_is_between_info_and_debug (line 40) | def test_verbose_level_is_between_info_and_debug():

FILE: tests/test_common_versioning.py
  function test_number_distance_documents_edge_cases (line 36) | def test_number_distance_documents_edge_cases(left, right, expected):
  function test_number_distance_is_symmetric_for_non_negative_numbers (line 43) | def test_number_distance_is_symmetric_for_non_negative_numbers(left, rig...
  function test_number_distance_is_bounded_for_non_negative_numbers (line 50) | def test_number_distance_is_bounded_for_non_negative_numbers(left, right):
  function test_number_distance_identity_for_non_negative_numbers (line 58) | def test_number_distance_identity_for_non_negative_numbers(value):
  function test_number_distance_returns_zero_for_negative_arguments (line 63) | def test_number_distance_returns_zero_for_negative_arguments(negative, v...
  function test_parse_version_normalizes_suffixes (line 68) | def test_parse_version_normalizes_suffixes():
  function test_parse_version_never_raises_for_text (line 75) | def test_parse_version_never_raises_for_text(value):
  function test_parse_version_is_idempotent_after_string_roundtrip (line 80) | def test_parse_version_is_idempotent_after_string_roundtrip(value):
  function test_version_distance_handles_identical_and_invalid_versions (line 87) | def test_version_distance_handles_identical_and_invalid_versions():
  function test_version_distance_is_bounded_for_text (line 93) | def test_version_distance_is_bounded_for_text(left, right):
  function test_nix_to_repology_pkg_name_handles_prefixes_and_special_cases (line 99) | def test_nix_to_repology_pkg_name_handles_prefixes_and_special_cases():

FILE: tests/test_compare_deps.py
  function test_compare_dependencies_filters_darwin_buildtime_source_paths (line 13) | def test_compare_dependencies_filters_darwin_buildtime_source_paths():

FILE: tests/test_components.py
  class FakeDrv (line 11) | class FakeDrv:
    method __init__ (line 14) | def __init__(self, store_path, name):
    method set_cpe (line 20) | def set_cpe(self, _generator):
    method to_dict (line 23) | def to_dict(self):
  function test_recursive_derivations_to_dataframe_skips_missing_paths (line 32) | def test_recursive_derivations_to_dataframe_skips_missing_paths():
  function test_runtime_derivations_to_dataframe_filters_outputs_before_loading (line 65) | def test_runtime_derivations_to_dataframe_filters_outputs_before_loading...

FILE: tests/test_cpe.py
  class FakeCache (line 13) | class FakeCache:
    method __init__ (line 14) | def __init__(self, df):
    method get (line 17) | def get(self, _url):
    method set (line 20) | def set(self, *_args, **_kwargs):
  function test_cpe_uses_indexed_unique_product_vendor (line 24) | def test_cpe_uses_indexed_unique_product_vendor(monkeypatch):
  function test_cpe_ambiguous_product_falls_back_to_product_name (line 43) | def test_cpe_ambiguous_product_falls_back_to_product_name(monkeypatch):

FILE: tests/test_dependency_index.py
  function test_build_dependency_index_combines_runtime_and_buildtime_edges (line 13) | def test_build_dependency_index_combines_runtime_and_buildtime_edges():
  function test_build_dependency_index_returns_none_without_dependencies (line 61) | def test_build_dependency_index_returns_none_without_dependencies():

FILE: tests/test_derivation_hardening.py
  function test_load_derivation_uses_nix_derivation_show (line 18) | def test_load_derivation_uses_nix_derivation_show(monkeypatch):
  function test_canonicalize_spdx_license_id_canonicalizes_aliases (line 73) | def test_canonicalize_spdx_license_id_canonicalizes_aliases():
  function test_cdx_and_spdx_license_exporters_use_canonical_spdx_ids (line 91) | def test_cdx_and_spdx_license_exporters_use_canonical_spdx_ids():
  function test_cdx_falls_back_to_license_short_name_when_spdx_id_is_invalid (line 151) | def test_cdx_falls_back_to_license_short_name_when_spdx_id_is_invalid():

FILE: tests/test_flakeref_resolution.py
  class CapturingLogger (line 24) | class CapturingLogger:
    method __init__ (line 25) | def __init__(self):
    method info (line 28) | def info(self, msg, *args):
    method log (line 31) | def log(self, level, msg, *args):
    method debug (line 34) | def debug(self, msg, *args):
  function test_nixos_configuration_attr_segments_use_nix_string_escaping (line 65) | def test_nixos_configuration_attr_segments_use_nix_string_escaping(name,...
  function test_nixos_configuration_parser_rejects_unescaped_interpolation (line 73) | def test_nixos_configuration_parser_rejects_unescaped_interpolation():
  function test_try_resolve_flakeref_uses_argv_lists (line 77) | def test_try_resolve_flakeref_uses_argv_lists():
  function test_try_resolve_flakeref_can_return_derivation_path (line 111) | def test_try_resolve_flakeref_can_return_derivation_path():
  function test_try_resolve_flakeref_logs_flake_progress_at_info (line 152) | def test_try_resolve_flakeref_logs_flake_progress_at_info():
  function test_try_resolve_flakeref_keeps_plain_path_probe_verbose (line 177) | def test_try_resolve_flakeref_keeps_plain_path_probe_verbose():
  function test_try_resolve_flakeref_raises_on_failed_force_realise (line 199) | def test_try_resolve_flakeref_raises_on_failed_force_realise():
  function test_try_resolve_flakeref_raises_when_force_realise_prints_no_path (line 211) | def test_try_resolve_flakeref_raises_when_force_realise_prints_no_path():
  function test_try_resolve_flakeref_raises_on_failed_eval_for_flakeref (line 223) | def test_try_resolve_flakeref_raises_on_failed_eval_for_flakeref():
  function test_try_resolve_flakeref_returns_none_for_non_flake_path (line 231) | def test_try_resolve_flakeref_returns_none_for_non_flake_path():
  function test_try_resolve_flakeref_returns_none_for_generated_plain_paths (line 248) | def test_try_resolve_flakeref_returns_none_for_generated_plain_paths(path):
  function test_try_resolve_flakeref_returns_none_for_missing_relative_paths (line 258) | def test_try_resolve_flakeref_returns_none_for_missing_relative_paths(pa...
  function test_try_resolve_flakeref_returns_none_for_existing_fragment_path_when_eval_fails (line 267) | def test_try_resolve_flakeref_returns_none_for_existing_fragment_path_wh...
  function test_try_resolve_flakeref_raises_for_generated_flakeref_failures (line 289) | def test_try_resolve_flakeref_raises_for_generated_flakeref_failures(fla...
  function test_try_resolve_flakeref_strips_generated_eval_output (line 308) | def test_try_resolve_flakeref_strips_generated_eval_output(flakeref):
  function test_flakeref_realisation_error_accepts_none_stderr (line 320) | def test_flakeref_realisation_error_accepts_none_stderr():
  function test_flake_ref_resolution_error_preserves_stderr_verbatim (line 327) | def test_flake_ref_resolution_error_preserves_stderr_verbatim():

FILE: tests/test_library_exceptions.py
  function test_df_from_csv_file_raises_csv_load_error (line 31) | def test_df_from_csv_file_raises_csv_load_error(monkeypatch):
  function test_df_log_ignores_none (line 41) | def test_df_log_ignores_none():
  function test_exit_unless_command_exists_raises_typed_error (line 45) | def test_exit_unless_command_exists_raises_typed_error():
  function test_exit_unless_nix_artifact_raises_typed_error (line 50) | def test_exit_unless_nix_artifact_raises_typed_error():
  function test_exit_unless_nix_artifact_uses_modern_nix_commands (line 61) | def test_exit_unless_nix_artifact_uses_modern_nix_commands():
  function test_find_deriver_raises_typed_error (line 97) | def test_find_deriver_raises_typed_error():
  function test_require_deriver_wraps_lookup_runtime_errors (line 102) | def test_require_deriver_wraps_lookup_runtime_errors():
  function test_require_deriver_preserves_typed_lookup_errors (line 113) | def test_require_deriver_preserves_typed_lookup_errors():
  function test_cpe_raises_typed_error_when_required_columns_are_missing (line 121) | def test_cpe_raises_typed_error_when_required_columns_are_missing(monkey...
  function test_df_apply_whitelist_raises_typed_error_without_vuln_id_column (line 135) | def test_df_apply_whitelist_raises_typed_error_without_vuln_id_column():
  function test_repology_cve_report_returns_false_on_empty_results (line 146) | def test_repology_cve_report_returns_false_on_empty_results():

FILE: tests/test_nix_cli_argv.py
  function test_exec_cmd_rejects_string_commands (line 20) | def test_exec_cmd_rejects_string_commands():
  function test_find_deriver_uses_argv_list (line 28) | def test_find_deriver_uses_argv_list(monkeypatch):
  function test_find_deriver_supports_nix_2_33_wrapped_json (line 72) | def test_find_deriver_supports_nix_2_33_wrapped_json(monkeypatch):
  function test_find_deriver_rejects_unloadable_structured_deriver (line 98) | def test_find_deriver_rejects_unloadable_structured_deriver(monkeypatch):
  function test_get_flake_metadata_uses_argv_list (line 141) | def test_get_flake_metadata_uses_argv_list():
  function test_get_flake_metadata_strips_nixpkgs_prefix_without_splitting_spaces (line 173) | def test_get_flake_metadata_strips_nixpkgs_prefix_without_splitting_spac...
  function test_run_nix_visualize_uses_argv_list (line 189) | def test_run_nix_visualize_uses_argv_list(tmp_path, monkeypatch):
  function test_meta_reads_nix_path_entry_with_spaces (line 231) | def test_meta_reads_nix_path_entry_with_spaces(monkeypatch):

FILE: tests/test_nix_outdated_pipeline.py
  class FakeSbomArtifact (line 19) | class FakeSbomArtifact:
    method __init__ (line 20) | def __init__(self, cdx_path):
    method cleanup (line 24) | def cleanup(self):
  function _repology_df (line 28) | def _repology_df():
  function _log_verbosity (line 45) | def _log_verbosity(verbosity):
  function test_collect_outdated_scan_data_runtime_uses_hooks_and_cleans_outputs (line 57) | def test_collect_outdated_scan_data_runtime_uses_hooks_and_cleans_output...
  function test_collect_outdated_scan_data_buildtime_skips_nix_visualize (line 120) | def test_collect_outdated_scan_data_buildtime_skips_nix_visualize(tmp_pa...
  function test_collect_outdated_scan_data_buildtime_debug_keeps_nix_visualize_optional (line 146) | def test_collect_outdated_scan_data_buildtime_debug_keeps_nix_visualize_...
  function test_generate_report_df_buildtime_adds_default_priority_and_renames_version (line 174) | def test_generate_report_df_buildtime_adds_default_priority_and_renames_...
  function test_write_report_defaults_to_nixpkgs_updates_and_drops_newest_duplicates (line 182) | def test_write_report_defaults_to_nixpkgs_updates_and_drops_newest_dupli...
  function test_write_report_local_buildtime_outputs_local_updates_without_priority (line 250) | def test_write_report_local_buildtime_outputs_local_updates_without_prio...

FILE: tests/test_nix_target_resolution.py
  function test_resolve_nix_target_preserves_flakeref_on_success (line 22) | def test_resolve_nix_target_preserves_flakeref_on_success(monkeypatch):
  function test_resolve_nix_target_requests_derivation_for_buildtime_flakeref (line 38) | def test_resolve_nix_target_requests_derivation_for_buildtime_flakeref(m...
  function test_resolve_nix_target_normalizes_plain_nixos_configuration (line 66) | def test_resolve_nix_target_normalizes_plain_nixos_configuration(monkeyp...
  function test_resolve_nix_target_normalizes_quoted_nixos_configuration (line 92) | def test_resolve_nix_target_normalizes_quoted_nixos_configuration(monkey...
  function test_resolve_nix_target_leaves_malformed_nixos_configuration_refs (line 130) | def test_resolve_nix_target_leaves_malformed_nixos_configuration_refs(
  function test_resolve_nix_target_propagates_flakeref_realisation_failure_without_path_probe (line 156) | def test_resolve_nix_target_propagates_flakeref_realisation_failure_with...
  function test_resolve_nix_target_propagates_flakeref_eval_failure_without_path_probe (line 184) | def test_resolve_nix_target_propagates_flakeref_eval_failure_without_pat...
  function test_resolve_nix_target_uses_plain_path_validation (line 212) | def test_resolve_nix_target_uses_plain_path_validation(monkeypatch):
  function test_resolve_nix_target_realises_runtime_drv_target (line 236) | def test_resolve_nix_target_realises_runtime_drv_target(monkeypatch):
  function test_resolve_nix_target_uses_first_runtime_drv_output (line 279) | def test_resolve_nix_target_uses_first_runtime_drv_output(monkeypatch):
  function test_resolve_nix_target_rejects_empty_runtime_drv_output (line 301) | def test_resolve_nix_target_rejects_empty_runtime_drv_output(monkeypatch):
  function test_resolve_nix_target_rejects_failed_runtime_drv_realisation (line 320) | def test_resolve_nix_target_rejects_failed_runtime_drv_realisation(monke...

FILE: tests/test_nix_utils_parsing.py
  function test_parse_nix_derivation_show_normalizes_nix_2_33_store_paths (line 16) | def test_parse_nix_derivation_show_normalizes_nix_2_33_store_paths():
  function test_get_nix_store_dir_ignores_colon_separated_env_paths (line 64) | def test_get_nix_store_dir_ignores_colon_separated_env_paths():
  function test_parse_nix_derivation_show_infers_store_dir_from_path_like_env_values (line 74) | def test_parse_nix_derivation_show_infers_store_dir_from_path_like_env_v...
  function test_parse_nix_derivation_show_rejects_changed_wrapper_shape (line 105) | def test_parse_nix_derivation_show_rejects_changed_wrapper_shape():
  function test_parse_nix_derivation_show_rejects_changed_output_shape (line 110) | def test_parse_nix_derivation_show_rejects_changed_output_shape():
  function test_parse_nix_derivation_show_rejects_invalid_json (line 126) | def test_parse_nix_derivation_show_rejects_invalid_json():

FILE: tests/test_nixgraph_graph.py
  class CapturingLogger (line 19) | class CapturingLogger:
    method __init__ (line 20) | def __init__(self):
    method debug (line 23) | def debug(self, msg, *args):
    method info (line 26) | def info(self, msg, *args):
    method warning (line 29) | def warning(self, msg, *args):
    method log (line 32) | def log(self, level, msg, *args):
  function test_dependency_graph_returns_dataframe_for_csv_output (line 36) | def test_dependency_graph_returns_dataframe_for_csv_output():
  function test_dependency_graph_inverse_returns_dataframe_for_csv_output (line 72) | def test_dependency_graph_inverse_returns_dataframe_for_csv_output():
  function test_dependency_graph_writes_raw_dot_without_graphviz_render (line 108) | def test_dependency_graph_writes_raw_dot_without_graphviz_render(tmp_path):
  function test_dependency_graph_deduplicates_rendered_nodes (line 131) | def test_dependency_graph_deduplicates_rendered_nodes():
  function test_dependency_graph_warns_before_large_graphviz_render (line 148) | def test_dependency_graph_warns_before_large_graphviz_render(monkeypatch):
  function test_load_dependencies_logs_dependency_loading_at_info (line 182) | def test_load_dependencies_logs_dependency_loading_at_info(monkeypatch):
  function test_load_dependencies_buildtime_uses_derivation_json (line 203) | def test_load_dependencies_buildtime_uses_derivation_json(monkeypatch):
  function test_load_dependencies_runtime_uses_resolved_output_path (line 249) | def test_load_dependencies_runtime_uses_resolved_output_path(monkeypatch):
  function test_nixgraph_no_longer_exposes_removed_graph_helpers (line 281) | def test_nixgraph_no_longer_exposes_removed_graph_helpers():

FILE: tests/test_nixmeta_parsing.py
  function test_parse_json_metadata_flattens_nested_fields (line 13) | def test_parse_json_metadata_flattens_nested_fields(tmp_path):

FILE: tests/test_nixmeta_progress.py
  class CapturingLogger (line 17) | class CapturingLogger:
    method __init__ (line 18) | def __init__(self):
    method debug (line 21) | def debug(self, msg, *args):
    method info (line 24) | def info(self, msg, *args):
    method warning (line 27) | def warning(self, msg, *args):
    method fatal (line 30) | def fatal(self, msg, *args):
    method log (line 33) | def log(self, level, msg, *args):
  function test_nixmeta_main_logs_scan_start (line 37) | def test_nixmeta_main_logs_scan_start(monkeypatch):
  function test_get_flake_metadata_logs_metadata_read (line 76) | def test_get_flake_metadata_logs_metadata_read():
  function test_get_nixpkgs_flakeref_uses_root_nixpkgs_input_with_renamed_node (line 96) | def test_get_nixpkgs_flakeref_uses_root_nixpkgs_input_with_renamed_node():
  function test_nixmeta_scanner_logs_nix_env_progress (line 128) | def test_nixmeta_scanner_logs_nix_env_progress(tmp_path, monkeypatch):
  function test_run_nix_env_metadata_captures_successful_stderr (line 187) | def test_run_nix_env_metadata_captures_successful_stderr(monkeypatch, tm...
  function test_nixmeta_scanner_tolerates_empty_metadata_json (line 212) | def test_nixmeta_scanner_tolerates_empty_metadata_json(tmp_path, monkeyp...
  function test_nixmeta_expression_scan_enables_flakes (line 233) | def test_nixmeta_expression_scan_enables_flakes(monkeypatch):
  function test_nixmeta_expression_scan_honors_impure (line 258) | def test_nixmeta_expression_scan_honors_impure(monkeypatch):

FILE: tests/test_nixmeta_source.py
  function test_classify_meta_nixpkgs_reserved_modes_before_explicit_source (line 19) | def test_classify_meta_nixpkgs_reserved_modes_before_explicit_source():
  function test_get_nixpkgs_meta_with_source_records_flakeref_lock (line 27) | def test_get_nixpkgs_meta_with_source_records_flakeref_lock(monkeypatch,...
  function test_get_nixpkgs_meta_with_source_records_opt_in_nix_path (line 60) | def test_get_nixpkgs_meta_with_source_records_opt_in_nix_path(monkeypatch):
  function test_explicit_nix_path_source_requires_nixpkgs_entry (line 86) | def test_explicit_nix_path_source_requires_nixpkgs_entry(monkeypatch):
  function test_path_target_without_source_skips_nix_path_metadata (line 102) | def test_path_target_without_source_skips_nix_path_metadata(monkeypatch):
  function test_explicit_store_path_source_records_explicit_method (line 123) | def test_explicit_store_path_source_records_explicit_method(monkeypatch,...
  function test_explicit_flakeref_source_resolves_nixpkgs_path (line 154) | def test_explicit_flakeref_source_resolves_nixpkgs_path(monkeypatch):
  function test_mutable_explicit_path_is_normalized_before_scanning (line 183) | def test_mutable_explicit_path_is_normalized_before_scanning(monkeypatch...
  function test_mutable_explicit_path_is_rejected_if_not_cache_safe (line 214) | def test_mutable_explicit_path_is_rejected_if_not_cache_safe(monkeypatch...
  function test_nixos_toplevel_flakeref_prefers_configuration_pkgs_path (line 229) | def test_nixos_toplevel_flakeref_prefers_configuration_pkgs_path(
  function test_nixos_toplevel_expression_locks_relative_flake_refs (line 295) | def test_nixos_toplevel_expression_locks_relative_flake_refs(
  function test_nixos_toplevel_expression_preserves_locked_subflake_dir (line 369) | def test_nixos_toplevel_expression_preserves_locked_subflake_dir(
  function test_nixos_toplevel_flakeref_handles_quoted_configuration_names (line 454) | def test_nixos_toplevel_flakeref_handles_quoted_configuration_names(
  function test_nixos_toplevel_flakeref_metadata_eval_honors_impure (line 512) | def test_nixos_toplevel_flakeref_metadata_eval_honors_impure(monkeypatch...
  function test_nixos_toplevel_expression_cache_uses_only_stable_refs (line 567) | def test_nixos_toplevel_expression_cache_uses_only_stable_refs(monkeypat...
  function test_nixos_toplevel_expression_scan_failure_skips_metadata (line 619) | def test_nixos_toplevel_expression_scan_failure_skips_metadata(
  function test_nixos_toplevel_flakeref_without_pkgs_path_returns_message (line 663) | def test_nixos_toplevel_flakeref_without_pkgs_path_returns_message(monke...
  function test_nixos_toplevel_flakeref_without_pkgs_returns_message (line 699) | def test_nixos_toplevel_flakeref_without_pkgs_returns_message(
  function test_plain_nixos_configuration_attrset_is_not_target_inferred (line 736) | def test_plain_nixos_configuration_attrset_is_not_target_inferred(
  function test_meta_scan_uses_already_resolved_scanner_path (line 766) | def test_meta_scan_uses_already_resolved_scanner_path(monkeypatch):

FILE: tests/test_nixmeta_source_export.py
  function _make_minimal_sbom (line 16) | def _make_minimal_sbom():
  function test_cdx_document_records_nixpkgs_metadata_source (line 53) | def test_cdx_document_records_nixpkgs_metadata_source(monkeypatch):
  function test_spdx_document_records_nixpkgs_metadata_source (line 70) | def test_spdx_document_records_nixpkgs_metadata_source(monkeypatch):

FILE: tests/test_osv_client.py
  class FakeResponse (line 11) | class FakeResponse:
    method __init__ (line 12) | def __init__(self, payload):
    method json (line 16) | def json(self):
    method raise_for_status (line 19) | def raise_for_status(self):
  class FakeSession (line 23) | class FakeSession:
    method __init__ (line 24) | def __init__(self):
    method post (line 27) | def post(self, url, json=None, timeout=None):
  function test_osv_client_posts_with_timeout_and_parses_results (line 45) | def test_osv_client_posts_with_timeout_and_parses_results(tmp_path):

FILE: tests/test_provenance_batching.py
  function _path_info_paths (line 15) | def _path_info_paths(cmd):
  function test_provenance_hash_query_batches_on_e2big (line 23) | def test_provenance_hash_query_batches_on_e2big():

FILE: tests/test_provenance_path_info.py
  function test_normalize_path_info_rejects_malformed_list_records (line 20) | def test_normalize_path_info_rejects_malformed_list_records():
  function test_normalize_path_info_rejects_malformed_object_records (line 25) | def test_normalize_path_info_rejects_malformed_object_records():
  function test_normalize_path_info_supports_list_records (line 30) | def test_normalize_path_info_supports_list_records():
  function test_nar_hash_for_path_rejects_missing_hash (line 45) | def test_nar_hash_for_path_rejects_missing_hash():
  function test_nar_hash_for_path_rejects_missing_record (line 50) | def test_nar_hash_for_path_rejects_missing_record():
  function test_dependency_paths_rejects_mismatched_path_info_record (line 55) | def test_dependency_paths_rejects_mismatched_path_info_record():
  function test_dependency_paths_recursive_includes_derivation_outputs (line 69) | def test_dependency_paths_recursive_includes_derivation_outputs():
  function test_query_path_info_wraps_nix_command_failures (line 102) | def test_query_path_info_wraps_nix_command_failures():

FILE: tests/test_provenance_subjects.py
  function _dependency_hooks (line 32) | def _dependency_hooks(*, exec_cmd_fn, query_path_hashes_fn=None):
  function _subject_hooks (line 53) | def _subject_hooks(exec_cmd_fn):
  function _path_info_paths (line 63) | def _path_info_paths(cmd):
  function test_get_dependencies_supports_nix_2_33_wrapped_json (line 72) | def test_get_dependencies_supports_nix_2_33_wrapped_json():
  function test_normalize_digest_does_not_shell_out (line 117) | def test_normalize_digest_does_not_shell_out():
  function test_normalize_digest_rejects_overflowing_nix32_values (line 130) | def test_normalize_digest_rejects_overflowing_nix32_values():
  function test_dependency_package_skips_non_normalized_digest (line 134) | def test_dependency_package_skips_non_normalized_digest(caplog):
  function test_get_dependencies_prefers_fixed_output_digest_for_output_paths (line 154) | def test_get_dependencies_prefers_fixed_output_digest_for_output_paths():
  function test_get_dependencies_maps_env_only_output_paths_back_to_derivations (line 207) | def test_get_dependencies_maps_env_only_output_paths_back_to_derivations():
  function test_get_dependencies_wraps_derivation_show_failures (line 256) | def test_get_dependencies_wraps_derivation_show_failures():
  function test_get_subjects_falls_back_to_env_output_paths (line 273) | def test_get_subjects_falls_back_to_env_output_paths():
  function test_get_subjects_prefers_derivation_hash_for_realized_flat_outputs (line 300) | def test_get_subjects_prefers_derivation_hash_for_realized_flat_outputs():
  function test_get_subjects_uses_derivation_hash_when_output_is_not_realized (line 321) | def test_get_subjects_uses_derivation_hash_when_output_is_not_realized():
  function test_get_subjects_supports_resource_sha256_metadata (line 342) | def test_get_subjects_supports_resource_sha256_metadata():
  function test_get_subjects_skips_unrealized_outputs_without_digest (line 367) | def test_get_subjects_skips_unrealized_outputs_without_digest():
  function test_get_subjects_skip_only_missing_unrealized_outputs (line 380) | def test_get_subjects_skip_only_missing_unrealized_outputs():
  function test_provenance_uses_store_path_hint_for_nix_2_33_outputs_without_path (line 409) | def test_provenance_uses_store_path_hint_for_nix_2_33_outputs_without_pa...
  function test_provenance_wraps_target_derivation_show_failures (line 457) | def test_provenance_wraps_target_derivation_show_failures(monkeypatch):
  function test_provenance_rejects_empty_target_derivation_metadata (line 474) | def test_provenance_rejects_empty_target_derivation_metadata(monkeypatch):
  function test_provenance_rejects_target_derivation_without_outputs (line 492) | def test_provenance_rejects_target_derivation_without_outputs(monkeypatch):
  function test_provenance_keeps_fixed_output_subjects_when_output_is_not_realized (line 523) | def test_provenance_keeps_fixed_output_subjects_when_output_is_not_reali...

FILE: tests/test_repology_adapter.py
  class FakeResponse (line 20) | class FakeResponse:
    method __init__ (line 21) | def __init__(self, text, status_code=200):
    method raise_for_status (line 25) | def raise_for_status(self):
  class MappingSession (line 30) | class MappingSession:
    method __init__ (line 31) | def __init__(self, responses):
    method get (line 35) | def get(self, url, timeout=None):
  function _fixture_text (line 42) | def _fixture_text(name):
  function test_repology_adapter_pkg_exact_parses_fixture_and_uses_timeout (line 46) | def test_repology_adapter_pkg_exact_parses_fixture_and_uses_timeout():
  function test_repology_adapter_pkg_exact_raises_for_empty_results (line 71) | def test_repology_adapter_pkg_exact_raises_for_empty_results():
  function test_repology_adapter_sbom_query_marks_special_statuses (line 90) | def test_repology_adapter_sbom_query_marks_special_statuses(tmp_path):
  function test_repology_adapter_query_cves_parses_fixture_and_uses_timeout (line 144) | def test_repology_adapter_query_cves_parses_fixture_and_uses_timeout():

FILE: tests/test_repology_cve.py
  class FakeResponse (line 15) | class FakeResponse:
    method __init__ (line 16) | def __init__(self, text, status_code=200):
    method raise_for_status (line 20) | def raise_for_status(self):
  class MappingSession (line 25) | class MappingSession:
    method __init__ (line 26) | def __init__(self, responses):
    method get (line 30) | def get(self, url, timeout=None):
  function test_query_cve_parses_fixture_and_uses_timeout (line 37) | def test_query_cve_parses_fixture_and_uses_timeout():

FILE: tests/test_repology_projects_parser.py
  function _fixture_text (line 17) | def _fixture_text(name):
  function test_parse_projects_search_html_parses_fixture_rows (line 21) | def test_parse_projects_search_html_parses_fixture_rows():
  function test_parse_projects_search_html_respects_already_processed_packages (line 49) | def test_parse_projects_search_html_respects_already_processed_packages():
  function test_parse_projects_search_html_raises_for_malformed_table (line 61) | def test_parse_projects_search_html_raises_for_malformed_table():

FILE: tests/test_repology_sbom.py
  function test_parse_cdx_sbom_normalizes_names_and_includes_metadata_component (line 21) | def test_parse_cdx_sbom_normalizes_names_and_includes_metadata_component...
  function test_merge_sbom_fields_and_classify_outdated_versions (line 45) | def test_merge_sbom_fields_and_classify_outdated_versions():
  function test_sbom_status_helpers_cover_ignored_rows (line 67) | def test_sbom_status_helpers_cover_ignored_rows():

FILE: tests/test_runtime_closure.py
  function test_runtime_closure_from_path_info_extracts_edges_and_derivers (line 17) | def test_runtime_closure_from_path_info_extracts_edges_and_derivers():
  function test_runtime_closure_from_path_info_supports_list_payloads (line 52) | def test_runtime_closure_from_path_info_supports_list_payloads():
  function test_runtime_closure_from_path_info_rejects_missing_references (line 71) | def test_runtime_closure_from_path_info_rejects_missing_references():
  function test_runtime_closure_from_path_info_rejects_malformed_reference_items (line 82) | def test_runtime_closure_from_path_info_rejects_malformed_reference_item...
  function test_load_runtime_closure_wraps_nix_command_failures (line 93) | def test_load_runtime_closure_wraps_nix_command_failures(monkeypatch):

FILE: tests/test_sbom_closure.py
  function _dependency_df (line 17) | def _dependency_df():
  function test_dependencies_to_depth_returns_reachable_dependency_rows (line 42) | def test_dependencies_to_depth_returns_reachable_dependency_rows():
  function test_walk_dependency_rows_supports_inverse_traversal (line 61) | def test_walk_dependency_rows_supports_inverse_traversal():
  function test_walk_dependency_rows_stops_after_matching_boundary_row (line 80) | def test_walk_dependency_rows_stops_after_matching_boundary_row():
  function test_dependencies_to_depth_returns_empty_dataframe_for_missing_start (line 99) | def test_dependencies_to_depth_returns_empty_dataframe_for_missing_start():
  function test_dependencies_to_depth_deduplicates_shared_diamond_edges (line 111) | def test_dependencies_to_depth_deduplicates_shared_diamond_edges():
  function test_dependency_paths_returns_all_source_and_target_paths (line 183) | def test_dependency_paths_returns_all_source_and_target_paths():

FILE: tests/test_sbom_vuln_enrichment.py
  class CapturingLogger (line 22) | class CapturingLogger:
    method __init__ (line 23) | def __init__(self):
    method info (line 26) | def info(self, msg, *args):
    method fatal (line 29) | def fatal(self, msg, *args):
  function test_sbomnix_getargs_accepts_meta_nixpkgs (line 33) | def test_sbomnix_getargs_accepts_meta_nixpkgs():
  function test_sbomnix_run_rejects_exclude_meta_with_meta_nixpkgs (line 45) | def test_sbomnix_run_rejects_exclude_meta_with_meta_nixpkgs():
  function test_sbomnix_main_enriches_cdx_explicitly_when_include_vulns_is_set (line 65) | def test_sbomnix_main_enriches_cdx_explicitly_when_include_vulns_is_set(...
  function test_sbomnix_main_logs_generation_before_initializing_builder (line 144) | def test_sbomnix_main_logs_generation_before_initializing_builder(monkey...
  function test_to_cdx_no_longer_triggers_vulnerability_scans (line 201) | def test_to_cdx_no_longer_triggers_vulnerability_scans(tmp_path, monkeyp...
  function test_sbom_vuln_enrichment_scans_expected_nix_target (line 265) | def test_sbom_vuln_enrichment_scans_expected_nix_target(
  function test_sbom_vuln_tempfile_is_removed_on_scan_failure (line 305) | def test_sbom_vuln_tempfile_is_removed_on_scan_failure(tmp_path, monkeyp...

FILE: tests/test_schema_validation.py
  function test_local_schema_aliases_resolve_to_vendored_resources (line 12) | def test_local_schema_aliases_resolve_to_vendored_resources():
  function test_validate_json_uses_only_local_schema_resources (line 33) | def test_validate_json_uses_only_local_schema_resources():

FILE: tests/test_store_batching.py
  function test_load_many_batches_nix_derivation_show_and_preserves_outputs (line 18) | def test_load_many_batches_nix_derivation_show_and_preserves_outputs(mon...
  function test_load_many_supports_output_path_queries (line 94) | def test_load_many_supports_output_path_queries(monkeypatch):
  function test_load_many_maps_output_queries_from_derivation_env (line 156) | def test_load_many_maps_output_queries_from_derivation_env(monkeypatch):
  function test_load_many_can_ignore_missing_output_derivations (line 200) | def test_load_many_can_ignore_missing_output_derivations(monkeypatch):
  function test_load_recursive_wraps_nix_command_failures (line 252) | def test_load_recursive_wraps_nix_command_failures(monkeypatch):
  function test_load_rejects_empty_derivation_metadata (line 268) | def test_load_rejects_empty_derivation_metadata(monkeypatch):
  function test_load_recursive_rejects_empty_derivation_metadata (line 282) | def test_load_recursive_rejects_empty_derivation_metadata(monkeypatch):

FILE: tests/test_temp_sbom_generation.py
  function test_vulnxscan_cleans_generated_tempfiles_on_failure (line 17) | def test_vulnxscan_cleans_generated_tempfiles_on_failure(tmp_path, monke...
  function test_generate_temp_sbom_without_csv_returns_only_cdx_path (line 76) | def test_generate_temp_sbom_without_csv_returns_only_cdx_path(tmp_path, ...
  function test_generate_temp_sbom_cleans_tempfiles_on_generation_failure (line 124) | def test_generate_temp_sbom_cleans_tempfiles_on_generation_failure(
  function test_generate_temp_sbom_cleans_first_tempfile_if_second_creation_fails (line 176) | def test_generate_temp_sbom_cleans_first_tempfile_if_second_creation_fails(

FILE: tests/test_vulnix_test_support.py
  function test_build_vulnix_test_env_prepends_wrapper_dir (line 19) | def test_build_vulnix_test_env_prepends_wrapper_dir(tmp_path):
  function test_dummy_vulnix_wrapper_returns_empty_json (line 38) | def test_dummy_vulnix_wrapper_returns_empty_json(tmp_path):
  function test_real_vulnix_wrapper_forwards_cache_dir_and_args (line 58) | def test_real_vulnix_wrapper_forwards_cache_dir_and_args(tmp_path):
  function test_configure_vulnix_for_tests_rejects_unknown_mode (line 96) | def test_configure_vulnix_for_tests_rejects_unknown_mode(tmp_path):
  function test_real_vulnix_wrapper_shows_clear_error_when_binary_missing (line 107) | def test_real_vulnix_wrapper_shows_clear_error_when_binary_missing(tmp_p...
  function test_ensure_real_vulnix_cache_surfaces_warmup_errors (line 130) | def test_ensure_real_vulnix_cache_surfaces_warmup_errors(tmp_path):
  function test_real_vulnix_wrapper_executes_real_binary (line 158) | def test_real_vulnix_wrapper_executes_real_binary(tmp_path):

FILE: tests/test_vulnxscan_engine.py
  function test_parse_vulnix_json_updates_cvss_cache (line 19) | def test_parse_vulnix_json_updates_cvss_cache():
  function test_parse_grype_json_prefers_cvss_v3_scores (line 41) | def test_parse_grype_json_prefers_cvss_v3_scores():
  function test_build_report_dataframe_merges_scanner_counts (line 75) | def test_build_report_dataframe_merges_scanner_counts():
  function test_write_reports_writes_triage_report (line 119) | def test_write_reports_writes_triage_report(tmp_path):
  function test_scan_vulnix_uses_argv_lists (line 139) | def test_scan_vulnix_uses_argv_lists(monkeypatch, buildtime, expected_cmd):

FILE: tests/test_vulnxscan_triage.py
  class FakeRepologyLookup (line 18) | class FakeRepologyLookup:
    method __init__ (line 19) | def __init__(self):
    method is_vulnerable (line 23) | def is_vulnerable(self, package, version, vuln_id=None):
    method query_repology_versions (line 27) | def query_repology_versions(self, df_vuln_pkgs):
  class FakeGitHubLookup (line 46) | class FakeGitHubLookup:
    method __init__ (line 47) | def __init__(self):
    method find_nixpkgs_prs (line 50) | def find_nixpkgs_prs(self, row):
  class FakeAdapter (line 55) | class FakeAdapter:
    method __init__ (line 56) | def __init__(self):
    method query (line 59) | def query(self, repology_query):
  function test_classify_vulnerability_marks_fixable_nixpkgs_update (line 79) | def test_classify_vulnerability_marks_fixable_nixpkgs_update():
  function test_triage_vulnerabilities_groups_rows_and_adds_nixpkgs_prs (line 98) | def test_triage_vulnerabilities_groups_rows_and_adds_nixpkgs_prs():
  function test_github_pr_lookup_queries_vuln_and_version_matches (line 134) | def test_github_pr_lookup_queries_vuln_and_version_matches():
  function test_query_repology_versions_prefers_exact_version_match (line 178) | def test_query_repology_versions_prefers_exact_version_match():
  function test_query_repology_rejects_unknown_match_type (line 213) | def test_query_repology_rejects_unknown_match_type():

FILE: tests/test_whitelist.py
  function test_whitelist (line 14) | def test_whitelist():

FILE: tests/testutils.py
  function resolve_local_schema_path (line 24) | def resolve_local_schema_path(uri, schema_dir):
  function create_local_schema_retriever (line 40) | def create_local_schema_retriever(schema_dir):
  function validate_json (line 50) | def validate_json(file_path, schema_path):
  function df_to_string (line 68) | def df_to_string(df):
  function df_difference (line 77) | def df_difference(df_left, df_right):

FILE: tests/vulnix_test_support.py
  class VulnixTestConfig (line 22) | class VulnixTestConfig:
  function default_vulnix_cache_dir (line 31) | def default_vulnix_cache_dir(env: dict[str, str] | None = None) -> Path:
  function vulnix_cache_ready (line 40) | def vulnix_cache_ready(cache_dir: Path) -> bool:
  function write_vulnix_wrapper (line 46) | def write_vulnix_wrapper(wrapper_dir: Path) -> Path:
  function build_vulnix_test_env (line 79) | def build_vulnix_test_env(
  function configure_vulnix_for_tests (line 98) | def configure_vulnix_for_tests(
  function ensure_real_vulnix_cache (line 126) | def ensure_real_vulnix_cache(
Condensed preview — 186 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (985K chars).
[
  {
    "path": ".envrc",
    "chars": 264,
    "preview": "#! /usr/bin/env bash\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: A"
  },
  {
    "path": ".github/dependabot.yml",
    "chars": 111,
    "preview": "version: 2\nupdates:\n  - package-ecosystem: github-actions\n    directory: /\n    schedule:\n      interval: daily\n"
  },
  {
    "path": ".github/workflows/codeql.yml",
    "chars": 2911,
    "preview": "# For most projects, this workflow file will not need changing; you simply need\n# to commit it to your repository.\n#\n# Y"
  },
  {
    "path": ".github/workflows/dependency-review.yml",
    "chars": 1001,
    "preview": "# Dependency Review Action\n#\n# This Action will scan dependency manifest files that change as part of a Pull Request,\n# "
  },
  {
    "path": ".github/workflows/release_sbomnix.yml",
    "chars": 1202,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\nname: Uplo"
  },
  {
    "path": ".github/workflows/scorecards.yml",
    "chars": 3230,
    "preview": "# This workflow uses actions that are not certified by GitHub. They are provided\n# by a third-party and are governed by "
  },
  {
    "path": ".github/workflows/test_sbomnix.yml",
    "chars": 1150,
    "preview": "# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\nname:"
  },
  {
    "path": ".gitignore",
    "chars": 378,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\ntemp/\nvenv"
  },
  {
    "path": ".gitlint",
    "chars": 444,
    "preview": "# SPDX-FileCopyrightText: 2025 TII (SSRC) and the Ghaf contributors\n# SPDX-License-Identifier: Apache-2.0\n\n[general]\n# I"
  },
  {
    "path": "LICENSES/Apache-2.0.txt",
    "chars": 10280,
    "preview": "Apache License\nVersion 2.0, January 2004\nhttp://www.apache.org/licenses/\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AN"
  },
  {
    "path": "LICENSES/BSD-3-Clause.txt",
    "chars": 1460,
    "preview": "Copyright (c) <year> <owner>. \n\nRedistribution and use in source and binary forms, with or without modification, are per"
  },
  {
    "path": "LICENSES/CC-BY-3.0.txt",
    "chars": 18666,
    "preview": "Creative Commons Attribution 3.0 Unported\n\n CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SE"
  },
  {
    "path": "LICENSES/CC-BY-SA-4.0.txt",
    "chars": 18329,
    "preview": "Creative Commons Attribution-ShareAlike 4.0 International\n\n Creative Commons Corporation (“Creative Commons”) is not a l"
  },
  {
    "path": "LICENSES/MIT.txt",
    "chars": 1078,
    "preview": "MIT License\n\nCopyright (c) <year> <copyright holders>\n\nPermission is hereby granted, free of charge, to any person obtai"
  },
  {
    "path": "README.md",
    "chars": 11983,
    "preview": "<!--\nSPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n"
  },
  {
    "path": "REUSE.toml",
    "chars": 740,
    "preview": "# SPDX-FileCopyrightText: 2022-2025 Technology Innovation Institute (TII)\n# SPDX-License-Identifier: Apache-2.0\nversion "
  },
  {
    "path": "VERSION",
    "chars": 6,
    "preview": "1.7.6\n"
  },
  {
    "path": "default.nix",
    "chars": 746,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n# SPDX-FileCopyrightText: 2020-2023 Eelco Dolstra a"
  },
  {
    "path": "doc/nix_outdated.md",
    "chars": 5875,
    "preview": "<!--\nSPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n\n# Ge"
  },
  {
    "path": "doc/nixgraph.md",
    "chars": 5661,
    "preview": "<!--\nSPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n"
  },
  {
    "path": "doc/nixmeta.md",
    "chars": 2774,
    "preview": "<!--\nSPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n\n# Ge"
  },
  {
    "path": "doc/provenance.md",
    "chars": 3317,
    "preview": "<!--\nSPDX-FileCopyrightText: 2024 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n\n# Ge"
  },
  {
    "path": "doc/repology_cli.md",
    "chars": 15776,
    "preview": "<!--\nSPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n\n# re"
  },
  {
    "path": "doc/vulnxscan.md",
    "chars": 33063,
    "preview": "<!--\nSPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n\n# vu"
  },
  {
    "path": "flake.nix",
    "chars": 982,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n{\n  descrip"
  },
  {
    "path": "nix/apps.nix",
    "chars": 1386,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n{\n  perSyst"
  },
  {
    "path": "nix/default.nix",
    "chars": 206,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n{\n  imports"
  },
  {
    "path": "nix/formatter.nix",
    "chars": 430,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n{ ... }:\n{\n"
  },
  {
    "path": "nix/git-hooks.nix",
    "chars": 2026,
    "preview": "# SPDX-FileCopyrightText: 2025-2026 TII (SSRC) and the Ghaf contributors\n# SPDX-License-Identifier: Apache-2.0\n{ inputs,"
  },
  {
    "path": "nix/packages.nix",
    "chars": 4103,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n{ self, ..."
  },
  {
    "path": "pyproject.toml",
    "chars": 1965,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n# SPDX-License-Identifier: Apache-2.0\n\n[build-syste"
  },
  {
    "path": "pyrightconfig.json",
    "chars": 157,
    "preview": "{\n    \"include\": [\"src\"],\n    \"extraPaths\": [\"src\"],\n    \"pythonVersion\": \"3.10\",\n    \"typeCheckingMode\": \"standard\",\n  "
  },
  {
    "path": "pytest.ini",
    "chars": 490,
    "preview": "# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n[pyte"
  },
  {
    "path": "scripts/check-fast.sh",
    "chars": 292,
    "preview": "#!/usr/bin/env bash\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: A"
  },
  {
    "path": "scripts/check-full.sh",
    "chars": 273,
    "preview": "#!/usr/bin/env bash\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: A"
  },
  {
    "path": "scripts/release-asset.sh",
    "chars": 573,
    "preview": "#!/usr/bin/env bash\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: A"
  },
  {
    "path": "scripts/run-pytest-lane.sh",
    "chars": 705,
    "preview": "#!/usr/bin/env bash\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: A"
  },
  {
    "path": "shell.nix",
    "chars": 744,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n# SPDX-FileCopyrightText: 2020-2023 Eelco Dolstra a"
  },
  {
    "path": "src/common/__init__.py",
    "chars": 109,
    "preview": "# SPDX-FileCopyrightText: 2022 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "src/common/cli_args.py",
    "chars": 3997,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Common "
  },
  {
    "path": "src/common/columns.py",
    "chars": 1221,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/common/df.py",
    "chars": 2074,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/common/errors.py",
    "chars": 4406,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/common/flakeref.py",
    "chars": 7774,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Flakere"
  },
  {
    "path": "src/common/http.py",
    "chars": 2146,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/common/log.py",
    "chars": 2881,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/common/nix_utils.py",
    "chars": 15495,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/common/package_names.py",
    "chars": 1066,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Package"
  },
  {
    "path": "src/common/pkgmeta.py",
    "chars": 1853,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Package"
  },
  {
    "path": "src/common/proc.py",
    "chars": 4118,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/common/regex.py",
    "chars": 382,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Small r"
  },
  {
    "path": "src/common/spdx.py",
    "chars": 677,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/common/versioning.py",
    "chars": 2956,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/nixgraph/__init__.py",
    "chars": 109,
    "preview": "# SPDX-FileCopyrightText: 2022 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "src/nixgraph/graph.py",
    "chars": 1914,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Ident"
  },
  {
    "path": "src/nixgraph/main.py",
    "chars": 3533,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Ident"
  },
  {
    "path": "src/nixgraph/render.py",
    "chars": 6591,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/nixmeta/__init__.py",
    "chars": 109,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "src/nixmeta/flake_metadata.py",
    "chars": 6397,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/nixmeta/main.py",
    "chars": 3072,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n# SPDX-License-Identifier: "
  },
  {
    "path": "src/nixmeta/metadata_json.py",
    "chars": 2069,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/nixmeta/scanner.py",
    "chars": 5576,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n# SPDX-License-Identifier: "
  },
  {
    "path": "src/nixupdate/__init__.py",
    "chars": 109,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "src/nixupdate/nix_outdated.py",
    "chars": 5427,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/nixupdate/nix_visualize.py",
    "chars": 1559,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/nixupdate/pipeline.py",
    "chars": 2936,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Executi"
  },
  {
    "path": "src/nixupdate/report.py",
    "chars": 4303,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Report "
  },
  {
    "path": "src/provenance/__init__.py",
    "chars": 109,
    "preview": "# SPDX-FileCopyrightText: 2024 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "src/provenance/dependencies.py",
    "chars": 4921,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/provenance/digests.py",
    "chars": 3798,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Digest "
  },
  {
    "path": "src/provenance/main.py",
    "chars": 4726,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2024 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/provenance/nix_commands.py",
    "chars": 581,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Nix com"
  },
  {
    "path": "src/provenance/path_info.py",
    "chars": 2314,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Structu"
  },
  {
    "path": "src/provenance/schema.py",
    "chars": 4435,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/provenance/subjects.py",
    "chars": 3455,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/repology/__init__.py",
    "chars": 206,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Repolog"
  },
  {
    "path": "src/repology/adapter.py",
    "chars": 10042,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Repolog"
  },
  {
    "path": "src/repology/cves.py",
    "chars": 4408,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Helpers"
  },
  {
    "path": "src/repology/exceptions.py",
    "chars": 484,
    "preview": "# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Repolog"
  },
  {
    "path": "src/repology/projects_parser.py",
    "chars": 4162,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"HTML pa"
  },
  {
    "path": "src/repology/repology_cli.py",
    "chars": 5562,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/repology/repology_cve.py",
    "chars": 2571,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/repology/reporting.py",
    "chars": 5300,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Console"
  },
  {
    "path": "src/repology/sbom.py",
    "chars": 3114,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Cyclone"
  },
  {
    "path": "src/repology/session.py",
    "chars": 668,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Shared "
  },
  {
    "path": "src/sbomnix/__init__.py",
    "chars": 109,
    "preview": "# SPDX-FileCopyrightText: 2022 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "src/sbomnix/builder.py",
    "chars": 14942,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Ident"
  },
  {
    "path": "src/sbomnix/cdx.py",
    "chars": 6157,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Ident"
  },
  {
    "path": "src/sbomnix/cli_utils.py",
    "chars": 4174,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/sbomnix/closure.py",
    "chars": 4634,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/sbomnix/components.py",
    "chars": 2141,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/sbomnix/cpe.py",
    "chars": 5377,
    "preview": "# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Ge"
  },
  {
    "path": "src/sbomnix/dependency_index.py",
    "chars": 4055,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/sbomnix/derivation.py",
    "chars": 12339,
    "preview": "# From: https://github.com/flyingcircusio/vulnix/blob/1.10.1/LICENSE:\n# SPDX-License-Identifier: BSD-3-Clause\n# SPDX-Fil"
  },
  {
    "path": "src/sbomnix/derivers.py",
    "chars": 2290,
    "preview": "# From: https://github.com/flyingcircusio/vulnix/blob/1.10.1/LICENSE:\n# SPDX-License-Identifier: BSD-3-Clause\n# SPDX-Fil"
  },
  {
    "path": "src/sbomnix/dfcache.py",
    "chars": 1765,
    "preview": "# SPDX-FileCopyrightText: 2022-2024 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Th"
  },
  {
    "path": "src/sbomnix/exporters.py",
    "chars": 7859,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/sbomnix/main.py",
    "chars": 4812,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Ident"
  },
  {
    "path": "src/sbomnix/meta.py",
    "chars": 7276,
    "preview": "# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Ca"
  },
  {
    "path": "src/sbomnix/meta_source.py",
    "chars": 14466,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Resolve"
  },
  {
    "path": "src/sbomnix/runtime.py",
    "chars": 2352,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/sbomnix/vuln_enrichment.py",
    "chars": 2234,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Cyclone"
  },
  {
    "path": "src/vulnxscan/__init__.py",
    "chars": 109,
    "preview": "# SPDX-FileCopyrightText: 2022 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "src/vulnxscan/github_prs.py",
    "chars": 3723,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/osv.py",
    "chars": 2157,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/osv_client.py",
    "chars": 4689,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n\"\"\"Reusabl"
  },
  {
    "path": "src/vulnxscan/parsers.py",
    "chars": 4147,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/repology_lookup.py",
    "chars": 8819,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/reporting.py",
    "chars": 4767,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/scanners.py",
    "chars": 1278,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/triage.py",
    "chars": 4019,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/utils.py",
    "chars": 2140,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/vulnscan.py",
    "chars": 5596,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/vulnxscan_cli.py",
    "chars": 5927,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "src/vulnxscan/whitelist.py",
    "chars": 4234,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "tests/__init__.py",
    "chars": 132,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/compare_deps.py",
    "chars": 10014,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "tests/compare_sboms.py",
    "chars": 5986,
    "preview": "#!/usr/bin/env python3\n\n# SPDX-FileCopyrightText: 2023 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier"
  },
  {
    "path": "tests/conftest.py",
    "chars": 8097,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/__init__.py",
    "chars": 132,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/test_nixgraph_cli.py",
    "chars": 9513,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/test_nixmeta_cli.py",
    "chars": 1055,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/test_nixupdate_cli.py",
    "chars": 916,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/test_provenance_cli.py",
    "chars": 1515,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/test_repology_cli.py",
    "chars": 982,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/test_sbomnix_cli.py",
    "chars": 5738,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/integration/test_vulnxscan_cli.py",
    "chars": 3589,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/resources/README.md",
    "chars": 1182,
    "preview": "<!--\nSPDX-FileCopyrightText: 2022 Technology Innovation Institute (TII)\n\nSPDX-License-Identifier: CC-BY-SA-4.0\n-->\n\n# Te"
  },
  {
    "path": "tests/resources/cdx_bom-1.3.schema.json",
    "chars": 39180,
    "preview": "{\n  \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n  \"$id\": \"http://cyclonedx.org/schema/bom-1.3a.schema.json\",\n "
  },
  {
    "path": "tests/resources/cdx_bom-1.4.schema.json",
    "chars": 72380,
    "preview": "{\n  \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n  \"$id\": \"http://cyclonedx.org/schema/bom-1.4.schema.json\",\n  "
  },
  {
    "path": "tests/resources/grype-test-db.tar.gz.license",
    "chars": 103,
    "preview": "SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\nSPDX-License-Identifier: Apache-2.0\n"
  },
  {
    "path": "tests/resources/jsf-0.82.schema.json",
    "chars": 263,
    "preview": "{\n  \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n  \"$id\": \"http://cyclonedx.org/schema/jsf-0.82.schema.json\",\n "
  },
  {
    "path": "tests/resources/make_grype_test_db.py",
    "chars": 8576,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/resources/nixmeta-package-set.nix",
    "chars": 1075,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n{ ... }:\n\n"
  },
  {
    "path": "tests/resources/provenance-1.0.schema.json",
    "chars": 6030,
    "preview": "{\n  \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n  \"$id\": \"https://in-toto.io/Statement/v1\",\n  \"title\": \"SLSA P"
  },
  {
    "path": "tests/resources/repology/cves_openssl.html",
    "chars": 537,
    "preview": "<html>\n  <body>\n    <table>\n      <thead>\n        <tr>\n          <th>CVE ID</th>\n          <th>Affected version(s)</th>\n"
  },
  {
    "path": "tests/resources/repology/projects_empty.html",
    "chars": 56,
    "preview": "<html>\n  <body>\n    <p>No matches</p>\n  </body>\n</html>\n"
  },
  {
    "path": "tests/resources/repology/projects_hello.html",
    "chars": 671,
    "preview": "<html>\n  <body>\n    <table>\n      <thead>\n        <tr>\n          <th>Project</th>\n          <th>Newest</th>\n          <t"
  },
  {
    "path": "tests/resources/sample_cdx_sbom.json",
    "chars": 437,
    "preview": "{\n  \"bomFormat\": \"CycloneDX\",\n  \"specVersion\": \"1.4\",\n  \"serialNumber\": \"urn:uuid:11111111-1111-4111-8111-111111111111\","
  },
  {
    "path": "tests/resources/spdx.schema.json",
    "chars": 262,
    "preview": "{\n  \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n  \"$id\": \"http://cyclonedx.org/schema/spdx.schema.json\",\n  \"ty"
  },
  {
    "path": "tests/resources/spdx_bom-2.3.schema.json",
    "chars": 45304,
    "preview": "{\n  \"$schema\" : \"http://json-schema.org/draft-07/schema#\",\n  \"$id\" : \"http://spdx.org/rdf/terms/2.3\",\n  \"title\" : \"SPDX "
  },
  {
    "path": "tests/resources/test-derivation-chain.nix",
    "chars": 916,
    "preview": "# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier: Apache-2.0\n\n{\n  system"
  },
  {
    "path": "tests/test_builder_runtime.py",
    "chars": 8170,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_buildtime_closure.py",
    "chars": 3966,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_cli_conventions.py",
    "chars": 7644,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_cli_error_boundaries.py",
    "chars": 4607,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_cli_smoke.py",
    "chars": 3106,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_common_log.py",
    "chars": 1192,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_common_versioning.py",
    "chars": 3029,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_compare_deps.py",
    "chars": 1626,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_components.py",
    "chars": 3205,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_cpe.py",
    "chars": 1466,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_dependency_index.py",
    "chars": 2433,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_derivation_hardening.py",
    "chars": 6354,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_flakeref_resolution.py",
    "chars": 9727,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_library_exceptions.py",
    "chars": 4480,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nix_cli_argv.py",
    "chars": 7078,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nix_outdated_pipeline.py",
    "chars": 9676,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nix_target_resolution.py",
    "chars": 9274,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nix_utils_parsing.py",
    "chars": 4359,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nixgraph_graph.py",
    "chars": 8762,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nixmeta_parsing.py",
    "chars": 1798,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nixmeta_progress.py",
    "chars": 7893,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nixmeta_source.py",
    "chars": 25075,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_nixmeta_source_export.py",
    "chars": 2752,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_osv_client.py",
    "chars": 2053,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_provenance_batching.py",
    "chars": 1746,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_provenance_path_info.py",
    "chars": 3795,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_provenance_subjects.py",
    "chars": 20068,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_repology_adapter.py",
    "chars": 5032,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_repology_cve.py",
    "chars": 1605,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_repology_projects_parser.py",
    "chars": 2070,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_repology_sbom.py",
    "chars": 2208,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_runtime_closure.py",
    "chars": 3713,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_sbom_closure.py",
    "chars": 5365,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_sbom_vuln_enrichment.py",
    "chars": 11204,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_schema_validation.py",
    "chars": 1154,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_store_batching.py",
    "chars": 9382,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_temp_sbom_generation.py",
    "chars": 6937,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_vulnix_test_support.py",
    "chars": 5738,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_vulnxscan_engine.py",
    "chars": 4787,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_vulnxscan_triage.py",
    "chars": 6782,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/test_whitelist.py",
    "chars": 1033,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/testpaths.py",
    "chars": 825,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/testutils.py",
    "chars": 2657,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  },
  {
    "path": "tests/vulnix_test_support.py",
    "chars": 5544,
    "preview": "#!/usr/bin/env python3\n# SPDX-FileCopyrightText: 2026 Technology Innovation Institute (TII)\n#\n# SPDX-License-Identifier:"
  }
]

About this extraction

This page contains the full source code of the tiiuae/sbomnix GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 186 files (903.4 KB), approximately 225.3k tokens, and a symbol index with 841 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!