main b66c7843f0ee cached
32 files
104.6 KB
25.9k tokens
67 symbols
1 request
Download .txt
Repository: DeterminateSystems/flake-checker
Branch: main
Commit: b66c7843f0ee
Files: 32
Total size: 104.6 KB

Directory structure:
gitextract_m548giyf/

├── .cargo/
│   └── config.toml
├── .editorconfig
├── .envrc
├── .github/
│   └── workflows/
│       ├── build.yaml
│       ├── ci.yaml
│       ├── flakehub-publish-tagged.yaml
│       ├── ref-statuses.yaml
│       ├── release-branches.yaml
│       ├── release-prs.yaml
│       ├── release-tags.yaml
│       └── update-flake-lock.yaml
├── .gitignore
├── Cargo.toml
├── LICENSE
├── README.md
├── flake.nix
├── parse-flake-lock/
│   ├── Cargo.toml
│   └── src/
│       └── lib.rs
├── ref-statuses.json
├── src/
│   ├── condition.rs
│   ├── error.rs
│   ├── flake.rs
│   ├── issue.rs
│   ├── main.rs
│   ├── ref_statuses.rs
│   ├── summary.rs
│   └── templates/
│       ├── summary.cel.md.hbs
│       ├── summary.cel.txt.hbs
│       ├── summary.standard.md.hbs
│       └── summary.standard.txt.hbs
├── templates/
│   └── README.md.handlebars
└── tests/
    └── cel-condition.cel

================================================
FILE CONTENTS
================================================

================================================
FILE: .cargo/config.toml
================================================
[target.'cfg(target_os = "linux")']
rustflags = [
  "--cfg", "tokio_unstable",
  "-Crelocation-model=static",
]


================================================
FILE: .editorconfig
================================================
# https://editorconfig.org
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.rs]
indent_size = 4

[*.hbs]
insert_final_newline = false


================================================
FILE: .envrc
================================================
use flake


================================================
FILE: .github/workflows/build.yaml
================================================
name: Build flake-checker artifacts

on:
  workflow_call:
  workflow_dispatch:

jobs:
  build-artifacts:
    runs-on: ${{ matrix.systems.runner }}
    permissions:
      id-token: write
      contents: read
    env:
      ARTIFACT_KEY: flake-checker-${{ matrix.systems.system }}
    strategy:
      matrix:
        systems:
          - nix-system: aarch64-darwin
            runner: macos-15
            system: ARM64-macOS
          - nix-system: aarch64-linux
            runner: ubuntu-24.04-arm
            system: ARM64-Linux
          - nix-system: x86_64-linux
            runner: ubuntu-24.04
            system: X64-Linux
    steps:
      - name: git checkout
        uses: actions/checkout@v6

      - name: Install Determinate Nix
        uses: DeterminateSystems/determinate-nix-action@main

      - name: Set up FlakeHub Cache
        uses: DeterminateSystems/flakehub-cache-action@main

      - name: Build and cache dev shell for ${{ matrix.systems.nix-system }} on ${{ matrix.systems.runner }}
        run: |
          nix build -L ".#devShells.${{ matrix.systems.nix-system }}.default"

      - name: Build package for ${{ matrix.systems.nix-system }}
        run: |
          nix build -L ".#packages.${{ matrix.systems.nix-system }}.default"
          cp ./result/bin/flake-checker flake-checker

      - name: Ensure that flake-checker binary is static on Linux
        if: contains(matrix.systems.nix-system, 'linux')
        run: |
          if file ./flake-checker | grep -E -q "static.+linked"; then
            echo "✅👍 STATIC"
          else
            echo "❌👎 DYNAMIC"
            exit 1
          fi

      - name: Upload flake-checker executable for ${{ matrix.systems.system }}
        uses: actions/upload-artifact@v4.3.3
        with:
          # Artifact name
          name: ${{ env.ARTIFACT_KEY }}
          path: flake-checker
          retention-days: 1


================================================
FILE: .github/workflows/ci.yaml
================================================
name: Flake checker CI

on:
  pull_request:
  push:
    branches: [main]

jobs:
  checks:
    name: Nix and Rust checks
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v6
      - uses: DeterminateSystems/determinate-nix-action@main
      - uses: DeterminateSystems/flakehub-cache-action@main
      - name: Check flake.lock
        uses: DeterminateSystems/flake-checker-action@main
        with:
          fail-mode: true
      - name: Check Nix formatting
        run: nix develop -c check-nix-fmt
      - name: Check Rust formatting
        run: nix develop -c check-rust-fmt
      - name: Clippy
        run: nix develop -c cargo clippy

  rust-tests:
    name: Test Rust
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v6
      - uses: DeterminateSystems/determinate-nix-action@main
      - uses: DeterminateSystems/flakehub-cache-action@main
      - name: cargo test
        run: nix develop -c cargo test

  check-flake-cel-condition:
    name: Check flake.lock test (CEL condition)
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v6
      - uses: DeterminateSystems/determinate-nix-action@main
      - uses: DeterminateSystems/flakehub-cache-action@main
      - name: Check flake.lock
        run: |
          nix develop -c \
            cargo run -- \
              --condition "supportedRefs.contains(gitRef) && numDaysOld > 30 && owner == 'NixOS'" \
              ./tests/flake.cel.0.lock

  check-flake-dirty:
    name: Check flake.lock test (dirty 😈)
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v6
      - uses: DeterminateSystems/determinate-nix-action@main
      - uses: DeterminateSystems/flakehub-cache-action@main
      - name: Check flake.lock
        run: |
          nix develop -c cargo run -- ./tests/flake.dirty.0.lock

  check-flake-clean:
    name: Check flake.lock test (clean 👼)
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v6
      - uses: DeterminateSystems/determinate-nix-action@main
      - uses: DeterminateSystems/flakehub-cache-action@main
      - name: Check flake.lock
        run: |
          nix develop -c cargo run

  check-flake-dirty-fail-mode:
    name: Check flake.lock test (dirty 😈 plus fail mode activated)
    runs-on: ubuntu-24.04
    if: false
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v6
      - uses: DeterminateSystems/determinate-nix-action@main
      - uses: DeterminateSystems/flakehub-cache-action@main
      - name: Check flake.lock
        run: |
          nix develop -c cargo run -- --fail-mode ./tests/flake.dirty.0.lock

  build-artifacts:
    name: Build artifacts
    needs: checks
    uses: ./.github/workflows/build.yaml
    secrets: inherit

  action-integration-test:
    name: Integration test for flake-checker-action
    needs: build-artifacts
    runs-on: ${{ matrix.systems.runner }}
    permissions:
      contents: read
      id-token: write
    env:
      ARTIFACT_KEY: flake-checker-${{ matrix.systems.system }}
    strategy:
      matrix:
        systems:
          - system: X64-Linux
            runner: ubuntu-24.04
          - system: ARM64-Linux
            runner: ubuntu-24.04-arm
          - system: ARM64-macOS
            runner: macos-15
    steps:
      - uses: actions/checkout@v6

      - name: Install Determinate Nix
        uses: DeterminateSystems/determinate-nix-action@main

      - name: Download flake-checker for ${{ matrix.systems.system }}
        uses: actions/download-artifact@v4.1.7
        with:
          name: ${{ env.ARTIFACT_KEY }}
          path: ${{ env.ARTIFACT_KEY }}

      - name: chmod flake-checker executable on ${{ matrix.systems.system }}
        run: |
          chmod +x "${{ env.ARTIFACT_KEY }}/flake-checker"

          file "${{ env.ARTIFACT_KEY }}/flake-checker"

      - name: Test flake-checker-action@main on ${{ matrix.systems.runner }}
        uses: DeterminateSystems/flake-checker-action@main
        with:
          source-binary: ${{ env.ARTIFACT_KEY }}/flake-checker


================================================
FILE: .github/workflows/flakehub-publish-tagged.yaml
================================================
name: "Publish tags to FlakeHub"

on:
  push:
    tags:
      - "v?[0-9]+.[0-9]+.[0-9]+*"
  workflow_dispatch:
    inputs:
      tag:
        description: "The existing tag to publish to FlakeHub"
        type: "string"
        required: true

jobs:
  flakehub-publish:
    runs-on: "ubuntu-latest"
    permissions:
      id-token: "write"
      contents: "read"
    steps:
      - uses: "actions/checkout@v6"
        with:
          ref: "${{ (inputs.tag != null) && format('refs/tags/{0}', inputs.tag) || '' }}"
      - uses: "DeterminateSystems/determinate-nix-action@main"
      - uses: "DeterminateSystems/flakehub-push@main"
        with:
          visibility: "public"
          name: "DeterminateSystems/flake-checker"
          tag: "${{ inputs.tag }}"
          include-output-paths: true


================================================
FILE: .github/workflows/ref-statuses.yaml
================================================
name: Check that ref statuses are up to date

on:
  schedule:
    - cron: "0 0 * * *" # Daily

jobs:
  check-ref-statuses:
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v6

      - uses: DeterminateSystems/determinate-nix-action@main

      - uses: DeterminateSystems/flakehub-cache-action@main

      - name: Check ref statuses
        run: |
          nix develop --command cargo run --features ref-statuses -- --check-ref-statuses

      - name: Update ref-statuses.json
        if: failure()
        run: |
          ref_statuses_json=$(nix develop --command cargo run --features ref-statuses -- --get-ref-statuses | jq --sort-keys .)
          echo "${ref_statuses_json}" > ref-statuses.json

      - name: Update README in light of new list
        if: failure()
        run: |
          nix develop --command update-readme

      - name: Create pull request
        if: failure()
        uses: peter-evans/create-pull-request@v6
        with:
          commit-message: Update ref-statuses.json to new valid Git refs list and update README
          title: Update ref-statuses.json
          body: |
            Nixpkgs has changed its list of maintained references. This PR updates `ref-statuses.json` to reflect that change.
          branch: updated-ref-statuses
          base: main


================================================
FILE: .github/workflows/release-branches.yaml
================================================
name: Release Branch

on:
  push:
    branches:
      # NOTE: make sure any branches here are also valid directory names,
      # otherwise creating the directory and uploading to s3 will fail
      - "main"

jobs:
  build:
    uses: ./.github/workflows/build.yaml

  release:
    needs: build

    concurrency: release
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write # In order to request a JWT for AWS auth
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Create the artifacts directory
        run: rm -rf ./artifacts && mkdir ./artifacts

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-ARM64-macOS
          path: cache-binary-ARM64-macOS
      - name: Persist the cache binary
        run: cp ./cache-binary-ARM64-macOS/flake-checker ./artifacts/ARM64-macOS

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-X64-Linux
          path: cache-binary-X64-Linux
      - name: Persist the cache binary
        run: cp ./cache-binary-X64-Linux/flake-checker ./artifacts/X64-Linux

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-ARM64-Linux
          path: cache-binary-ARM64-Linux
      - name: Persist the cache binary
        run: cp ./cache-binary-ARM64-Linux/flake-checker ./artifacts/ARM64-Linux

      - uses: DeterminateSystems/push-artifact-ids@main
        with:
          s3_upload_role: ${{ secrets.AWS_S3_UPLOAD_ROLE }}
          bucket: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
          directory: ./artifacts
          ids_project_name: flake-checker
          ids_binary_prefix: flake-checker


================================================
FILE: .github/workflows/release-prs.yaml
================================================
name: Release PR

on:
  pull_request:
    types:
      - opened
      - reopened
      - synchronize
      - labeled

jobs:
  build:
    # We want to build and upload artifacts only if the `upload to s3` label is applied.
    # Only intra-repo PRs are allowed to have PR artifacts uploaded.
    # We only want to trigger the upload once: when the `upload to s3` label is added,
    # not whenever any other label is added while that label is already present.
    if: |
      github.event.pull_request.head.repo.full_name == 'DeterminateSystems/flake-checker'
      && (
        (github.event.action == 'labeled' && github.event.label.name == 'upload to s3')
        || (github.event.action != 'labeled' && contains(github.event.pull_request.labels.*.name, 'upload to s3'))
      )
    uses: ./.github/workflows/build.yaml

  release:
    needs: build
    concurrency: release
    runs-on: ubuntu-latest
    permissions:
      id-token: write # In order to request a JWT for AWS auth
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Create the artifacts directory
        run: rm -rf ./artifacts && mkdir ./artifacts

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-ARM64-macOS
          path: cache-binary-ARM64-macOS
      - name: Persist the cache binary
        run: cp ./cache-binary-ARM64-macOS/flake-checker ./artifacts/ARM64-macOS

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-X64-Linux
          path: cache-binary-X64-Linux
      - name: Persist the cache binary
        run: cp ./cache-binary-X64-Linux/flake-checker ./artifacts/X64-Linux

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-ARM64-Linux
          path: cache-binary-ARM64-Linux
      - name: Persist the cache binary
        run: cp ./cache-binary-ARM64-Linux/flake-checker ./artifacts/ARM64-Linux

      - uses: DeterminateSystems/push-artifact-ids@main
        with:
          s3_upload_role: ${{ secrets.AWS_S3_UPLOAD_ROLE }}
          bucket: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
          directory: ./artifacts
          ids_project_name: flake-checker
          ids_binary_prefix: flake-checker


================================================
FILE: .github/workflows/release-tags.yaml
================================================
name: Release Tags

on:
  push:
    tags:
      - "v*.*.*"

jobs:
  build:
    uses: ./.github/workflows/build.yaml

  release:
    needs: build

    concurrency: release
    runs-on: ubuntu-latest
    permissions:
      contents: write # In order to upload artifacts to GitHub releases
      id-token: write # In order to request a JWT for AWS auth
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Create the artifacts directory
        run: rm -rf ./artifacts && mkdir ./artifacts

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-ARM64-macOS
          path: cache-binary-ARM64-macOS
      - name: Persist the cache binary
        run: cp ./cache-binary-ARM64-macOS/flake-checker ./artifacts/ARM64-macOS

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-X64-Linux
          path: cache-binary-X64-Linux
      - name: Persist the cache binary
        run: cp ./cache-binary-X64-Linux/flake-checker ./artifacts/X64-Linux

      - uses: actions/download-artifact@v4.1.7
        with:
          name: flake-checker-ARM64-Linux
          path: cache-binary-ARM64-Linux
      - name: Persist the cache binary
        run: cp ./cache-binary-ARM64-Linux/flake-checker ./artifacts/ARM64-Linux

      - uses: DeterminateSystems/push-artifact-ids@main
        with:
          s3_upload_role: ${{ secrets.AWS_S3_UPLOAD_ROLE }}
          bucket: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
          directory: ./artifacts
          ids_project_name: flake-checker
          ids_binary_prefix: flake-checker

      - name: Rename binaries for GH release
        run: |
          mv ./artifacts/{,flake-checker-}ARM64-macOS
          mv ./artifacts/{,flake-checker-}X64-Linux
          mv ./artifacts/{,flake-checker-}ARM64-Linux

      - name: Publish Release to GitHub (Tag)
        uses: softprops/action-gh-release@v1
        with:
          fail_on_unmatched_files: true
          draft: true
          files: |
            artifacts/**


================================================
FILE: .github/workflows/update-flake-lock.yaml
================================================
name: update-flake-lock

on:
  workflow_dispatch: # enable manual triggering
  schedule:
    - cron: "0 0 */15 * *" # on days 1, 16, and 31 of each month (roughly every 15 days)

jobs:
  lockfile:
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v6
      - uses: DeterminateSystems/determinate-nix-action@main
      - uses: DeterminateSystems/flakehub-cache-action@main
      - uses: DeterminateSystems/update-flake-lock@main
        with:
          pr-title: "Update flake.lock"
          pr-labels: |
            dependencies
            automated
          inputs: |
            nixpkgs


================================================
FILE: .gitignore
================================================
# Rust artifacts
/target

# Nix artifacts
result

# Generated
summary.md
!src/templates/summary.md
src/policy.json

# Release script artifacts
releases
.direnv


================================================
FILE: Cargo.toml
================================================
[package]
name = "flake-checker"
version = "0.2.11"
edition = "2024"

[workspace]
resolver = "2"
members = [".", "parse-flake-lock"]

[workspace.dependencies]
serde = { version = "1.0.163", features = ["derive"] }
serde_json = { version = "1.0.100", default-features = false, features = [
  "std",
] }
thiserror = { version = "1.0.40", default-features = false }

[dependencies]
cel-interpreter = { version = "0.7.1", default-features = false }
chrono = { version = "0.4.25", default-features = false, features = ["clock"] }
clap = { version = "4.3.0", default-features = false, features = [
  "derive",
  "env",
  "std",
  "wrap_help",
] }
detsys-ids-client = { version = "0.6", features = ["tracing-instrument"] }
handlebars = { version = "4.3.7", default-features = false }
parse-flake-lock = { path = "./parse-flake-lock" }
reqwest = { version = "0.13", default-features = false, features = [
  "blocking",
  "json",
  "rustls",
] }
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { version = "1", features = ["full", "tracing"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }

[features]
default = []
ref-statuses = []


================================================
FILE: LICENSE
================================================

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!)  The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.


================================================
FILE: README.md
================================================
# Nix Flake Checker

[![FlakeHub](https://img.shields.io/endpoint?url=https://flakehub.com/f/DeterminateSystems/flake-checker/badge)](https://flakehub.com/flake/DeterminateSystems/flake-checker)

**Nix Flake Checker** is a tool from [Determinate Systems][detsys] that performs "health" checks on the [`flake.lock`][lockfile] files in your [flake][flakes]-powered Nix projects.
Its goal is to help your Nix projects stay on recent and supported versions of [Nixpkgs].

To run the checker in the root of a Nix project:

```shell
nix run github:DeterminateSystems/flake-checker

# Or point to an explicit path for flake.lock
nix run github:DeterminateSystems/flake-checker /path/to/flake.lock
```

Nix Flake Checker looks at your `flake.lock`'s root-level [Nixpkgs] inputs.
There are two ways to express flake policies:

- Via [config parameters](#parameters).
- Via [policy conditions](#policy-conditions) using [Common Expression Language][cel] (CEL).

If you're running it locally, Nix Flake Checker reports any issues via text output in your terminal.
But you can also use Nix Flake Checker [in CI](#the-flake-checker-action).

## Supported branches

At any given time, [Nixpkgs] has a bounded set of branches that are considered _supported_.
The current list:

- `nixos-25.05`
- `nixos-25.05-small`
- `nixos-25.11`
- `nixos-25.11-small`
- `nixos-unstable`
- `nixos-unstable-small`
- `nixpkgs-25.05-darwin`
- `nixpkgs-25.11-darwin`
- `nixpkgs-unstable`

## Parameters

By default, Flake Checker verifies that:

- Any explicit Nixpkgs Git refs are in the [supported list](#supported-branches).
- Any Nixpkgs dependencies are less than 30 days old.
- Any Nixpkgs dependencies have the [`NixOS`][nixos-org] org as the GitHub owner (and thus that the dependency isn't a fork or non-upstream variant).

You can adjust this behavior via configuration (all are enabled by default but you can disable them):

| Flag                | Environment variable                | Action                                                     | Default |
| :------------------ | :---------------------------------- | :--------------------------------------------------------- | :------ |
| `--check-outdated`  | `NIX_FLAKE_CHECKER_CHECK_OUTDATED`  | Check for outdated Nixpkgs inputs                          | `true`  |
| `--check-owner`     | `NIX_FLAKE_CHECKER_CHECK_OWNER`     | Check that Nixpkgs inputs have `NixOS` as the GitHub owner | `true`  |
| `--check-supported` | `NIX_FLAKE_CHECKER_CHECK_SUPPORTED` | Check that Git refs for Nixpkgs inputs are supported       | `true`  |

## Policy conditions

You can apply a CEL condition to your flake using the `--condition` flag.
Here's an example:

```shell
flake-checker --condition "has(numDaysOld) && numDaysOld < 365"
```

This would check that each Nixpkgs input in your `flake.lock` is less than 365 days old.
These variables are available in each condition:

| Variable        | Description                                                                                                                              |
| :-------------- | :--------------------------------------------------------------------------------------------------------------------------------------- |
| `gitRef`        | The Git reference of the input.                                                                                                          |
| `numDaysOld`    | The number of days old the input is.                                                                                                     |
| `owner`         | The input's owner (if a GitHub input).                                                                                                   |
| `supportedRefs` | A list of [supported Git refs](#supported-branches) (all are branch names).                                                              |
| `refStatuses`   | A map. Each key is a branch name. Each value is a branch status (`"rolling"`, `"beta"`, `"stable"`, `"deprecated"` or `"unmaintained"`). |

We recommend a condition _at least_ this stringent:

```ruby
supportedRefs.contains(gitRef) && (has(numDaysOld) && numDaysOld < 30) && owner == 'NixOS'
```

Note that not all Nixpkgs inputs have a `numDaysOld` field, so make sure that the field exists before checking the number of days.

Here are some other example conditions:

```ruby
# Updated in the last two weeks
supportedRefs.contains(gitRef) && (has(numDaysOld) && numDaysOld < 14) && owner == 'NixOS'

# Check for the most recent stable Nixpkgs
gitRef.contains("25.11")
```

## The Nix Flake Checker Action

You can automate Nix Flake Checker by adding Determinate Systems' [Nix Flake Checker Action][action] to your GitHub Actions workflows:

```yaml
checks:
  steps:
    - uses: actions/checkout@v6
    - name: Check Nix flake Nixpkgs inputs
      uses: DeterminateSystems/flake-checker-action@main
```

When run in GitHub Actions, Nix Flake Checker always exits with a status code of 0 by default&mdash;and thus never fails your workflows&mdash;and reports its findings as a [Markdown summary][md].

## Telemetry

The goal of Nix Flake Checker is to help teams stay on recent and supported versions of Nixpkgs.
The flake checker collects a little bit of telemetry information to help us make that true.

To disable diagnostic reporting, set the diagnostics URL to an empty string by passing `--no-telemetry` or setting `FLAKE_CHECKER_NO_TELEMETRY=true`.

You can read the full privacy policy for [Determinate Systems][detsys], the creators of this tool and the [Determinate Nix Installer][installer], [here][privacy].

## Rust library

The Nix Flake Checker is written in [Rust].
This repo exposes a [`parse-flake-lock`](./parse-flake-lock) crate that you can use to parse [`flake.lock` files][lockfile] in your own Rust projects.
To add that dependency:

```toml
[dependencies]
parse-flake-lock = { git = "https://github.com/DeterminateSystems/flake-checker", branch = "main" }
```

Here's an example usage:

```rust
use std::path::Path;

use parse_flake_lock::{FlakeLock, FlakeLockParseError};

fn main() -> Result<(), FlakeLockParseError> {
    let flake_lock = FlakeLock::new(Path::new("flake.lock"))?;
    println!("flake.lock info:");
    println!("version: {version}", version=flake_lock.version);
    println!("root node: {root:?}", root=flake_lock.root);
    println!("all nodes: {nodes:?}", nodes=flake_lock.nodes);

    Ok(())
}
```

The `parse-flake-lock` crate doesn't yet exhaustively parse all input node types, instead using a "fallthrough" mechanism that parses input types that don't yet have explicit struct definitions to a [`serde_json::value::Value`][val].
If you'd like to help make the parser more exhaustive, [pull requests][prs] are quite welcome.

[action]: https://github.com/DeterminateSystems/flake-checker-action
[cel]: https://cel.dev
[detsys]: https://determinate.systems
[flakes]: https://zero-to-nix.com/concepts/flakes
[install]: https://zero-to-nix.com/start/install
[installer]: https://github.com/DeterminateSystems/nix-installer
[lockfile]: https://zero-to-nix.com/concepts/flakes#lockfile
[md]: https://github.blog/2022-05-09-supercharging-github-actions-with-job-summaries
[nixos-org]: https://github.com/NixOS
[nixpkgs]: https://github.com/NixOS/nixpkgs
[privacy]: https://determinate.systems/policies/privacy
[prs]: /pulls
[rust]: https://rust-lang.org
[telemetry]: https://github.com/DeterminateSystems/nix-flake-checker/blob/main/src/telemetry.rs#L29-L43
[val]: https://docs.rs/serde_json/latest/serde_json/value/enum.Value.html


================================================
FILE: flake.nix
================================================
{
  inputs = {
    nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1";

    fenix = {
      url = "https://flakehub.com/f/nix-community/fenix/0.1";
      inputs.nixpkgs.follows = "nixpkgs";
    };

    crane.url = "https://flakehub.com/f/ipetkov/crane/0";

    easy-template = {
      url = "https://flakehub.com/f/DeterminateSystems/easy-template/0";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs =
    { self, ... }@inputs:
    let
      inherit (inputs.nixpkgs) lib;

      # Version string of the form "<yyyymmdd>-<short rev>", falling back to
      # the epoch date and "dirty" when flake metadata isn't available.
      lastModifiedDate = self.lastModifiedDate or self.lastModified or "19700101";
      version = "${builtins.substring 0 8 lastModifiedDate}-${self.shortRev or "dirty"}";

      # Package metadata read from Cargo.toml.
      # NOTE(review): this binding appears unused here (the overlay below reads
      # Cargo.toml again for its own `meta`); confirm before removing.
      meta = (builtins.fromTOML (builtins.readFile ./Cargo.toml)).package;

      supportedSystems = [
        "x86_64-linux"
        "aarch64-linux"
        "aarch64-darwin"
      ];

      # Apply `f` to every supported system, passing it both the system string
      # and a nixpkgs instance with this flake's overlay applied.
      forAllSystems =
        f:
        lib.genAttrs supportedSystems (
          system:
          f {
            inherit system;
            pkgs = import inputs.nixpkgs {
              inherit system;
              overlays = [ self.overlays.default ];
            };
          }
        );

      # The musl target triple used for static Linux builds, or null for
      # systems that aren't built statically.
      staticTarget' =
        system:
        {
          "aarch64-linux" = "aarch64-unknown-linux-musl";
          "x86_64-linux" = "x86_64-unknown-linux-musl";
        }
        .${system} or null;

    in
    {
      packages = forAllSystems (
        { pkgs, system }:
        {
          default = self.packages.${system}.flake-checker;
          inherit (pkgs) flake-checker;
        }
      );

      devShells = forAllSystems (
        { pkgs, system }:
        {
          default =
            let
              staticTarget = staticTarget' system;
              # Use the static package set when targeting musl.
              pkgs' = if staticTarget != null then pkgs.pkgsStatic else pkgs;

              # CI helper: check formatting of all tracked .nix files.
              check-nix-fmt = pkgs.writeShellApplication {
                name = "check-nix-fmt";
                runtimeInputs = with pkgs; [
                  git
                  nixfmt
                ];
                text = ''
                  git ls-files '*.nix' | xargs nixfmt --check
                '';
              };
              # CI helper: check Rust formatting.
              check-rust-fmt = pkgs.writeShellApplication {
                name = "check-rust-fmt";
                runtimeInputs = with pkgs; [
                  rustToolchain
                ];
                text = "cargo fmt --check";
              };
              # Script: run the checker's `ref-statuses` feature to fetch the
              # current ref statuses.
              get-ref-statuses = pkgs.writeShellApplication {
                name = "get-ref-statuses";
                runtimeInputs = with pkgs; [
                  rustToolchain
                ];
                text = "cargo run --features ref-statuses -- --get-ref-statuses";
              };
              # Script: re-render README.md from its Handlebars template, using
              # ref-statuses.json (wrapped as `{supported: ...}`) as input.
              update-readme = pkgs.writeShellApplication {
                name = "update-readme";
                runtimeInputs = [
                  inputs.easy-template.packages.${system}.default
                  pkgs.jq
                ];
                text = ''
                  tmp=$(mktemp -d)
                  inputs="''${tmp}/template-inputs.json"

                  jq '{supported: .}' ./ref-statuses.json > "''${inputs}"
                  easy-template ./templates/README.md.handlebars "''${inputs}" > README.md

                  rm -rf "''${tmp}"
                '';
              };
            in
            pkgs'.mkShell {
              packages = with pkgs; [
                bashInteractive

                # Rust
                lld
                rustToolchain
                cargo-bloat
                cargo-edit
                cargo-machete
                cargo-watch

                # CI checks
                check-nix-fmt
                check-rust-fmt

                # Scripts
                get-ref-statuses
                update-readme

                self.formatter.${system}
              ];

              # Required by rust-analyzer
              env = {
                RUST_SRC_PATH = "${pkgs.rustToolchain}/lib/rustlib/src/rust/library";
              }
              // pkgs.env;
            };
        }
      );

      formatter = forAllSystems ({ pkgs, ... }: pkgs.nixfmt);

      overlays.default =
        final: prev:
        let
          meta = (builtins.fromTOML (builtins.readFile ./Cargo.toml)).package;

          inherit (prev.stdenv.hostPlatform) system;

          staticTarget = staticTarget' system;
          pkgs' = if staticTarget != null then final.pkgsStatic else final;

          # A fenix toolchain; the musl rust-std is added only when building
          # statically.
          rustToolchain =
            with inputs.fenix.packages.${system};
            combine (
              with stable;
              [
                clippy
                rustc
                cargo
                rustfmt
                rust-src
                rust-analyzer
              ]
              ++ lib.optionals (staticTarget != null) [
                targets.${staticTarget}.stable.rust-std
              ]
            );

          craneLib = (inputs.crane.mkLib pkgs').overrideToolchain (_: rustToolchain);

          # Upper-cased, underscore-separated form of the Rust target, for use
          # in the Cargo linker env var name below.
          rustTargetSpec = final.stdenv.hostPlatform.rust.rustcTargetSpec;
          rustTargetSpecEnv = lib.toUpper (builtins.replaceStrings [ "-" ] [ "_" ] rustTargetSpec);

          env = lib.optionalAttrs (staticTarget != null) {
            CARGO_BUILD_TARGET = staticTarget;
            "CARGO_TARGET_${rustTargetSpecEnv}_LINKER" = "${final.stdenv.cc.targetPrefix}cc";
          };
        in
        {
          flake-checker =
            let
              sharedAttrs = {
                inherit (meta) name;
                inherit version;

                src = builtins.path {
                  name = "flake-checker-src";
                  path = self;
                };

                depsBuildBuild = [
                  pkgs'.buildPackages.stdenv.cc
                  pkgs'.lld
                ];

                doIncludeCrossToolchainEnv = false;

                inherit env;
              };
            in
            craneLib.buildPackage (
              sharedAttrs
              // {
                # Build dependencies separately so they can be reused across
                # source-only rebuilds.
                cargoArtifacts = craneLib.buildDepsOnly sharedAttrs;

                disallowedReferences = lib.optionals final.stdenv.hostPlatform.isDarwin [
                  final.libiconv
                ];

                # On Darwin, rewrite the libiconv reference to the system
                # dylib so the Nix store path doesn't end up referenced by
                # the binary (see disallowedReferences above).
                postFixup = lib.optionalString final.stdenv.hostPlatform.isDarwin ''
                  install_name_tool -change \
                    "$(otool -L $out/bin/flake-checker | grep libiconv | awk '{print $1}')" \
                    /usr/lib/libiconv.2.dylib \
                    $out/bin/flake-checker
                '';
              }
            );

          inherit env rustToolchain;
        };
    };
}


================================================
FILE: parse-flake-lock/Cargo.toml
================================================
[package]
name = "parse-flake-lock"
version = "0.1.1"
edition = "2021"

[dependencies]
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }


================================================
FILE: parse-flake-lock/src/lib.rs
================================================
#![allow(dead_code)]

//! A library for parsing Nix [`flake.lock`][lock] files
//! into a structured Rust representation. [Determinate Systems][detsys] currently uses this library
//! for its [Nix Flake Checker][checker] and [Nix Flake Checker Action][action] but it's designed to
//! be generally useful.
//!
//! [action]: https://github.com/DeterminateSystems/flake-checker-action
//! [checker]: https://github.com/DeterminateSystems/flake-checker
//! [detsys]: https://determinate.systems
//! [lock]: https://zero-to-nix.com/concepts/flakes#lockfile

use std::collections::{HashMap, VecDeque};
use std::fmt;
use std::fs::read_to_string;
use std::path::{Path, PathBuf};

use serde::de::{self, MapAccess, Visitor};
use serde::{Deserialize, Deserializer};

/// A custom error type for the `parse-flake-lock` crate.
///
/// `std::io::Error` and `serde_json::Error` convert into this type via `From`,
/// so `?` can be used directly on file reads and JSON parsing.
#[derive(Debug, thiserror::Error)]
pub enum FlakeLockParseError {
    /// The `flake.lock` can be parsed as JSON but is nonetheless invalid
    /// (e.g. a lock node that should have inputs has none).
    #[error("invalid flake.lock file: {0}")]
    Invalid(String),
    /// The `flake.lock` file couldn't be found or read.
    #[error("couldn't find the flake.lock file: {0}")]
    NotFound(#[from] std::io::Error),
    /// The specified `flake.lock` file couldn't be parsed as JSON.
    #[error("couldn't parse the flake.lock file as json: {0}")]
    Json(#[from] serde_json::Error),
}

/// A Rust representation of a Nix [`flake.lock`
/// file](https://zero-to-nix.com/concepts/flakes#lockfile).
#[derive(Clone, Debug)]
pub struct FlakeLock {
    /// The `nodes` field of the `flake.lock`, representing all input [Node]s for the flake,
    /// keyed by node name.
    pub nodes: HashMap<String, Node>,
    /// The root-level inputs of the `flake.lock`, keyed by input name, with all input
    /// references resolved into the corresponding [Node]s from the `nodes` field.
    pub root: HashMap<String, Node>,
    /// The version of the `flake.lock` schema (the top-level `version` field).
    pub version: usize,
}

/// A custom [Deserializer] for `flake.lock` files, which are standard JSON but require some special
/// logic to create a meaningful Rust representation.
///
/// In the raw JSON, `root` is just the name of a node in `nodes`, and that node's inputs refer to
/// other nodes by name (or by a list of names to be chased). This impl resolves each of the root
/// node's inputs to the concrete [Node] it points at, so [FlakeLock]'s `root` field maps input
/// names directly to nodes.
impl<'de> Deserialize<'de> for FlakeLock {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The three expected top-level keys of a flake.lock document.
        #[derive(Deserialize)]
        #[serde(field_identifier, rename_all = "lowercase")]
        enum Field {
            Nodes,
            Root,
            Version,
        }

        struct FlakeLockVisitor;

        impl<'de> Visitor<'de> for FlakeLockVisitor {
            type Value = FlakeLock;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("struct FlakeLock")
            }

            fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
            where
                V: MapAccess<'de>,
            {
                // Collect the three expected fields, rejecting duplicates.
                let mut nodes = None;
                let mut root = None;
                let mut version = None;
                while let Some(key) = map.next_key()? {
                    match key {
                        Field::Nodes => {
                            if nodes.is_some() {
                                return Err(de::Error::duplicate_field("nodes"));
                            }
                            nodes = Some(map.next_value()?);
                        }
                        Field::Root => {
                            if root.is_some() {
                                return Err(de::Error::duplicate_field("root"));
                            }
                            root = Some(map.next_value()?);
                        }
                        Field::Version => {
                            if version.is_some() {
                                return Err(de::Error::duplicate_field("version"));
                            }
                            version = Some(map.next_value()?);
                        }
                    }
                }
                let nodes: HashMap<String, Node> =
                    nodes.ok_or_else(|| de::Error::missing_field("nodes"))?;
                let root: String = root.ok_or_else(|| de::Error::missing_field("root"))?;
                let version: usize = version.ok_or_else(|| de::Error::missing_field("version"))?;

                let mut root_nodes = HashMap::new();
                // NOTE(review): this indexing panics if `root` names a node that isn't
                // present in `nodes` — consider returning a de::Error instead.
                let root_node = &nodes[&root];
                // The node named by `root` must be a Root node (a bare map of inputs).
                let Node::Root(root_node) = root_node else {
                    return Err(de::Error::custom(format!(
                        "root node was not a Root node, but was a {} node",
                        root_node.variant()
                    )));
                };

                // Resolve each root input (a single node name, or a list of names
                // to chase through intermediate nodes) to its concrete node.
                for (root_name, root_input) in root_node.inputs.iter() {
                    let inputs: VecDeque<String> = match root_input.clone() {
                        Input::String(s) => [s].into(),
                        Input::List(keys) => keys.into(),
                    };

                    let real_node = chase_input_node(&nodes, inputs).map_err(|e| {
                        de::Error::custom(format!("failed to chase input {}: {:?}", root_name, e))
                    })?;
                    root_nodes.insert(root_name.clone(), real_node.clone());
                }

                Ok(FlakeLock {
                    nodes,
                    root: root_nodes,
                    version,
                })
            }
        }

        deserializer.deserialize_any(FlakeLockVisitor)
    }
}

/// Follow a chain of input references through `nodes` until reaching the concrete
/// [Node] the chain ultimately resolves to.
///
/// The first element of `inputs` is looked up directly in `nodes`; each subsequent
/// element is resolved through the `inputs` table of the node reached so far,
/// recursing when an input is itself expressed as a list of keys.
///
/// Returns [FlakeLockParseError::Invalid] when a referenced node or input doesn't
/// exist, rather than panicking on a malformed lock file.
fn chase_input_node(
    nodes: &HashMap<String, Node>,
    mut inputs: VecDeque<String>,
) -> Result<&Node, FlakeLockParseError> {
    let Some(next_input) = inputs.pop_front() else {
        // Callers always supply at least one key (see the Deserialize impl).
        unreachable!("there should always be at least one input");
    };

    // Previously `&nodes[&next_input]`: indexing panics on a missing key, turning
    // a malformed flake.lock into a crash instead of a parse error.
    let mut node = nodes.get(&next_input).ok_or_else(|| {
        FlakeLockParseError::Invalid(format!(
            "lock file referenced a node named {} that doesn't exist",
            next_input
        ))
    })?;
    for input in inputs {
        // Extract the node's input table, if it has one. Fallthrough nodes store
        // raw JSON, so their `inputs` field is parsed on demand.
        let maybe_node_inputs = match node {
            Node::Root(_) => None,
            Node::Repo(node) => node.inputs.to_owned(),
            Node::Indirect(node) => node.inputs.to_owned(),
            Node::Path(node) => node.inputs.to_owned(),
            Node::Tarball(node) => node.inputs.to_owned(),
            Node::Fallthrough(node) => match node.get("inputs") {
                Some(node_inputs) => serde_json::from_value(node_inputs.clone())
                    .map_err(FlakeLockParseError::Json)?,
                None => None,
            },
        };

        let node_inputs = match maybe_node_inputs {
            Some(node_inputs) => node_inputs,
            None => {
                return Err(FlakeLockParseError::Invalid(format!(
                    "lock node should have had some inputs but had none:\n{:?}",
                    node
                )));
            }
        };

        // Previously `&node_inputs[&input]`, which also panics when the key is absent.
        let next_inputs = node_inputs.get(&input).ok_or_else(|| {
            FlakeLockParseError::Invalid(format!(
                "lock node should have had an input named {} but didn't:\n{:?}",
                input, node
            ))
        })?;
        node = match next_inputs {
            Input::String(s) => nodes.get(s).ok_or_else(|| {
                FlakeLockParseError::Invalid(format!(
                    "lock file referenced a node named {} that doesn't exist",
                    s
                ))
            })?,
            Input::List(inputs) => chase_input_node(nodes, inputs.to_owned().into())?,
        };
    }

    Ok(node)
}

impl FlakeLock {
    /// Instantiate a new [FlakeLock] from the provided [Path].
    pub fn new(path: &Path) -> Result<Self, FlakeLockParseError> {
        let flake_lock_file = read_to_string(path)?;
        let flake_lock: FlakeLock = serde_json::from_str(&flake_lock_file)?;
        Ok(flake_lock)
    }
}

/// A flake input [node]. This enum represents several concrete node types ([RootNode],
/// [RepoNode], [IndirectNode], [PathNode], and [TarballNode]) and uses the `Fallthrough`
/// variant to capture node types that don't have explicitly defined structs in this
/// library, representing them as raw [Value][serde_json::value::Value]s.
///
/// Deserialization is `untagged`: serde tries each variant in declaration order, so
/// `Fallthrough` (which accepts any JSON value) must remain last.
///
/// [node]: https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#lock-files
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum Node {
    /// A [RootNode] specifying an [Input] map.
    Root(RootNode),
    /// A [RepoNode] flake input for a [Git](https://git-scm.com) repository (or another version
    /// control system). Boxed to keep the enum's size small.
    Repo(Box<RepoNode>),
    /// An [IndirectNode] flake input stemming from an indirect flake reference like `inputs.nixpkgs.url =
    /// "nixpkgs";`.
    Indirect(IndirectNode),
    /// A [PathNode] flake input stemming from a filesystem path.
    Path(PathNode),
    /// Nodes that point to tarball paths.
    Tarball(TarballNode),
    /// A "catch-all" variant for node types that don't (yet) have explicit struct definitions in
    /// this crate.
    Fallthrough(serde_json::value::Value), // Covers all other node types
}

// A string representation of the node variant (for logging).
impl Node {
    fn variant(&self) -> &'static str {
        match self {
            Node::Root(_) => "Root",
            Node::Repo(_) => "Repo",
            Node::Indirect(_) => "Indirect",
            Node::Path(_) => "Path",
            Node::Tarball(_) => "Tarball",
            Node::Fallthrough(_) => "Fallthrough", // Covers all other node types
        }
    }
}

/// An enum type representing node input references.
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum Input {
    /// An input expressed as a single node name, e.g. `"nixpkgs"`.
    String(String),
    /// An input expressed as a list of names to be chased through intermediate
    /// nodes' input tables.
    List(Vec<String>),
}

/// A flake [Node] representing a raw mapping of strings to [Input]s.
///
/// `deny_unknown_fields` keeps the untagged [Node] enum from matching richer node
/// types against this variant.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RootNode {
    /// A mapping from input name to the flake's input [Node] references.
    pub inputs: HashMap<String, Input>,
}

/// A [Node] representing a [Git](https://git-scm.com) repository (or another version control
/// system).
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RepoNode {
    /// Whether the input is itself a flake (absent means unspecified).
    pub flake: Option<bool>,
    /// The node's inputs, if any, keyed by input name.
    pub inputs: Option<HashMap<String, Input>>,
    /// The "locked" attributes of the input (set by Nix).
    pub locked: RepoLocked,
    /// The "original" (user-supplied) attributes of the repository input.
    pub original: RepoOriginal,
}

/// Information about the repository input that's "locked" because it's supplied by Nix.
///
/// The serde `alias` attributes let fields deserialize from the lock file's camelCase
/// names as well as this struct's snake_case names.
#[derive(Clone, Debug, Deserialize)]
pub struct RepoLocked {
    /// The Unix timestamp for when the input was last modified.
    #[serde(alias = "lastModified")]
    pub last_modified: i64,
    /// The NAR hash of the input.
    #[serde(alias = "narHash")]
    pub nar_hash: Option<String>,
    /// The repository owner.
    pub owner: String,
    /// The repository.
    pub repo: String,
    /// The Git revision.
    pub rev: String,
    /// The `type` field of the locked input, as recorded by Nix.
    #[serde(alias = "type")]
    pub node_type: String,
}

/// The `original` field of a [Repo][Node::Repo] node.
#[derive(Clone, Debug, Deserialize)]
pub struct RepoOriginal {
    /// The repository owner.
    pub owner: String,
    /// The repository.
    pub repo: String,
    /// The Git reference of the input, if one was specified.
    #[serde(alias = "ref")]
    pub git_ref: Option<String>,
    /// The `type` field of the original (user-supplied) input reference.
    #[serde(alias = "type")]
    pub node_type: String,
}

/// An indirect flake input (using the [flake
/// registry](https://nixos.org/manual/nix/stable/command-ref/conf-file.html#conf-flake-registry)).
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct IndirectNode {
    /// The "locked" attributes of the input (set by Nix).
    pub locked: RepoLocked,
    /// The node's inputs, if any, keyed by input name.
    pub inputs: Option<HashMap<String, Input>>,
    /// The "original" (user-supplied) attributes of the indirect flake registry input.
    pub original: IndirectOriginal,
}

/// The `original` field of an [Indirect][Node::Indirect] node.
#[derive(Clone, Debug, Deserialize)]
pub struct IndirectOriginal {
    /// The ID of the input (recognized by the [flake
    /// registry](https://nixos.org/manual/nix/stable/command-ref/conf-file.html#conf-flake-registry)).
    pub id: String,
    /// The type of the node (always `"indirect"`).
    #[serde(alias = "type")]
    pub node_type: String,
}

/// A flake input as a filesystem path, e.g. `inputs.local.url = "path:./subdir";`.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct PathNode {
    /// The "locked" attributes of the input (set by Nix).
    pub locked: PathLocked,
    /// The node's inputs, if any, keyed by input name.
    pub inputs: Option<HashMap<String, Input>>,
    /// The "original" (user-supplied) attributes of the path input.
    pub original: PathOriginal,
}

/// Information about the path input that's "locked" because it's supplied by Nix.
#[derive(Clone, Debug, Deserialize)]
pub struct PathLocked {
    /// The Unix timestamp for when the input was last modified.
    #[serde(alias = "lastModified")]
    pub last_modified: i64,
    /// The NAR hash of the input.
    #[serde(alias = "narHash")]
    pub nar_hash: Option<String>,
    /// The relative filesystem path for the input.
    pub path: PathBuf,
    /// The type of the node (always `"path"`).
    #[serde(alias = "type")]
    pub node_type: String,
}

/// The user-supplied path input info.
#[derive(Clone, Debug, Deserialize)]
pub struct PathOriginal {
    /// The relative filesystem path for the input.
    pub path: PathBuf,
    /// The Git reference of the input, if one was specified.
    #[serde(alias = "ref")]
    pub git_ref: Option<String>,
    /// The type of the node (always `"path"`).
    #[serde(alias = "type")]
    pub node_type: String,
}

/// A flake input as a tarball URL.
#[derive(Clone, Debug, Deserialize)]
pub struct TarballNode {
    /// The "locked" attributes of the input (set by Nix).
    pub locked: TarballLocked,
    /// The node's inputs, if any, keyed by input name.
    pub inputs: Option<HashMap<String, Input>>,
    /// The "original" (user-supplied) attributes of the tarball input.
    pub original: TarballOriginal,
}

/// Information about the tarball input that's "locked" because it's supplied by Nix.
#[derive(Clone, Debug, Deserialize)]
pub struct TarballLocked {
    /// The Unix timestamp for when the input was last modified. Optional, unlike for
    /// repo and path inputs.
    #[serde(alias = "lastModified")]
    pub last_modified: Option<i64>,
    /// The NAR hash of the input.
    #[serde(alias = "narHash")]
    pub nar_hash: Option<String>,
    /// The type of the node (always `"tarball"`).
    #[serde(alias = "type")]
    pub node_type: String,
    /// The URL used to fetch the tarball.
    pub url: String,
}

/// The user-supplied tarball input info.
#[derive(Clone, Debug, Deserialize)]
pub struct TarballOriginal {
    /// The URL for the tarball input.
    pub url: String,
    /// The type of the node (always `"tarball"`).
    #[serde(alias = "type")]
    pub node_type: String,
}


================================================
FILE: ref-statuses.json
================================================
{
  "nixos-25.05": "unmaintained",
  "nixos-25.05-small": "unmaintained",
  "nixos-25.11": "stable",
  "nixos-25.11-small": "stable",
  "nixos-unstable": "rolling",
  "nixos-unstable-small": "rolling",
  "nixpkgs-25.05-darwin": "unmaintained",
  "nixpkgs-25.11-darwin": "stable",
  "nixpkgs-unstable": "rolling"
}


================================================
FILE: src/condition.rs
================================================
use cel_interpreter::{Context, Program, Value};
use parse_flake_lock::{FlakeLock, Node};

use std::collections::{BTreeMap, HashMap};

use crate::{
    error::FlakeCheckerError,
    flake::{nixpkgs_deps, num_days_old},
    issue::{Issue, IssueKind},
};

// Names of the variables exposed to CEL conditions (documented in the README's
// "Policy conditions" table).
const KEY_GIT_REF: &str = "gitRef";
const KEY_NUM_DAYS_OLD: &str = "numDaysOld";
const KEY_OWNER: &str = "owner";
const KEY_REF_STATUSES: &str = "refStatuses";
const KEY_SUPPORTED_REFS: &str = "supportedRefs";

/// Evaluate the CEL `condition` against every root-level Nixpkgs input in
/// `flake_lock`, returning one [IssueKind::Violation] issue per input for which
/// the condition evaluates to `false`.
///
/// `ref_statuses` and `supported_refs` are exposed to the condition as the
/// `refStatuses` and `supportedRefs` variables; per-input variables (`gitRef`,
/// `numDaysOld`, `owner`) are set on each iteration.
///
/// # Errors
///
/// Returns an error if the condition fails to compile, fails to execute, or
/// evaluates to a non-Boolean value.
pub(super) fn evaluate_condition(
    flake_lock: &FlakeLock,
    nixpkgs_keys: &[String],
    condition: &str,
    ref_statuses: BTreeMap<String, String>,
    supported_refs: Vec<String>,
) -> Result<Vec<Issue>, FlakeCheckerError> {
    let mut issues: Vec<Issue> = vec![];
    let mut ctx = Context::default();

    // The CEL context takes a HashMap rather than a BTreeMap.
    let ref_statuses = ref_statuses
        .into_iter()
        .collect::<HashMap<String, String>>();
    ctx.add_variable_from_value(KEY_REF_STATUSES, ref_statuses);
    ctx.add_variable_from_value(KEY_SUPPORTED_REFS, supported_refs);

    // Compile the condition once, up front. Previously the program was
    // recompiled inside the loop on every iteration, and a syntactically
    // invalid condition went unreported whenever there were no Nixpkgs
    // inputs to check.
    let program = Program::compile(condition)?;

    let deps = nixpkgs_deps(flake_lock, nixpkgs_keys)?;

    for (name, node) in deps {
        // Pull out the fields exposed to the condition; node types that don't
        // carry a given field contribute `None` (mapped to ""/0 by
        // `add_cel_variables`).
        let (git_ref, last_modified, owner) = match node {
            Node::Repo(repo) => (
                repo.original.git_ref,
                Some(repo.locked.last_modified),
                Some(repo.original.owner),
            ),
            Node::Tarball(tarball) => (None, tarball.locked.last_modified, None),
            _ => (None, None, None),
        };

        add_cel_variables(&mut ctx, git_ref, last_modified, owner);

        // The condition must evaluate to a Boolean: `false` records a
        // violation for this input; anything non-Boolean is an error.
        match program.execute(&ctx) {
            Ok(Value::Bool(true)) => continue,
            Ok(Value::Bool(false)) => {
                issues.push(Issue {
                    input: name.clone(),
                    kind: IssueKind::Violation,
                });
            }
            Ok(result) => {
                return Err(FlakeCheckerError::NonBooleanCondition(
                    result.type_of().to_string(),
                ));
            }
            Err(e) => return Err(FlakeCheckerError::CelExecution(e)),
        }
    }

    Ok(issues)
}

/// Set (or overwrite) the per-input CEL variables on `ctx`.
///
/// Absent values are written as `""` / `0` rather than left unset, so a
/// condition can always reference these keys.
fn add_cel_variables(
    ctx: &mut Context,
    git_ref: Option<String>,
    last_modified: Option<i64>,
    owner: Option<String>,
) {
    let git_ref_value = value_or_empty_string(git_ref);
    let num_days_old_value = value_or_zero(last_modified.map(num_days_old));
    let owner_value = value_or_empty_string(owner);

    ctx.add_variable_from_value(KEY_GIT_REF, git_ref_value);
    ctx.add_variable_from_value(KEY_NUM_DAYS_OLD, num_days_old_value);
    ctx.add_variable_from_value(KEY_OWNER, owner_value);
}

/// Convert an optional string into a CEL [Value], defaulting to `""` when absent.
fn value_or_empty_string(value: Option<String>) -> Value {
    match value {
        Some(s) => Value::from(s),
        None => Value::from(String::new()),
    }
}

/// Convert an optional integer into a CEL value, defaulting to `0`.
fn value_or_zero(value: Option<i64>) -> Value {
    let n = value.unwrap_or(0);
    Value::from(n)
}


================================================
FILE: src/error.rs
================================================
/// Every failure mode the checker surfaces to the user.
///
/// Most variants are thin `#[from]` wrappers so `?` converts library errors
/// anywhere in the crate; `NonBooleanCondition` and `Invalid` carry
/// checker-specific messages.
#[derive(Debug, thiserror::Error)]
pub enum FlakeCheckerError {
    #[error("CEL execution error: {0}")]
    CelExecution(#[from] cel_interpreter::ExecutionError),
    #[error("CEL parsing error: {0}")]
    CelParse(#[from] cel_interpreter::ParseError),
    #[error("env var error: {0}")]
    EnvVar(#[from] std::env::VarError),
    // NOTE(review): FlakeLock and Json below share the same user-facing
    // message ("couldn't parse flake.lock"); consider differentiating them.
    #[error("couldn't parse flake.lock: {0}")]
    FlakeLock(#[from] parse_flake_lock::FlakeLockParseError),
    #[error("http client error: {0}")]
    Http(#[from] reqwest::Error),
    #[error("CEL conditions must return a Boolean but returned {0} instead")]
    NonBooleanCondition(String),
    #[error("couldn't access flake.lock: {0}")]
    Io(#[from] std::io::Error),
    #[error("couldn't parse flake.lock: {0}")]
    Json(#[from] serde_json::Error),
    #[error("handlebars render error: {0}")]
    Render(#[from] handlebars::RenderError),
    #[error("handlebars template error: {0}")]
    Template(#[from] Box<handlebars::TemplateError>),
    #[error("invalid flake.lock: {0}")]
    Invalid(String),
}


================================================
FILE: src/flake.rs
================================================
#![allow(dead_code)]

use std::collections::BTreeMap;

use crate::FlakeCheckerError;
use crate::issue::{Disallowed, Issue, IssueKind, NonUpstream, Outdated};

use chrono::{Duration, Utc};
use parse_flake_lock::{FlakeLock, Node};

/// Maximum age (in days) a Nixpkgs input may have before it's flagged as outdated.
pub const MAX_DAYS: i64 = 30;

/// Toggles for the standard (non-CEL) flake.lock checks.
pub(crate) struct FlakeCheckConfig {
    /// Flag Git refs that aren't in the supported set.
    pub check_supported: bool,
    /// Flag inputs whose last modification is older than `MAX_DAYS`.
    pub check_outdated: bool,
    /// Flag inputs whose GitHub owner isn't "NixOS".
    pub check_owner: bool,
    // NOTE(review): not read by check_flake_lock itself; presumably consumed
    // by the Summary templates — verify.
    pub fail_mode: bool,
    /// The flake.lock root keys to treat as Nixpkgs inputs.
    pub nixpkgs_keys: Vec<String>,
}

impl Default for FlakeCheckConfig {
    fn default() -> Self {
        Self {
            check_supported: true,
            check_outdated: true,
            check_owner: true,
            fail_mode: false,
            nixpkgs_keys: vec![String::from("nixpkgs")],
        }
    }
}

/// Select from the lock file's root nodes the entries named by `keys` that
/// plausibly represent Nixpkgs (repo, tarball, or a matching indirect node).
///
/// Returns `FlakeCheckerError::Invalid` naming every requested key that has no
/// matching dependency in the lock file.
pub(super) fn nixpkgs_deps(
    flake_lock: &FlakeLock,
    keys: &[String],
) -> Result<BTreeMap<String, Node>, FlakeCheckerError> {
    let mut deps: BTreeMap<String, Node> = BTreeMap::new();

    for (key, node) in flake_lock.root.clone() {
        let matches_key = match &node {
            // Repo and tarball nodes qualify whenever their key was requested.
            Node::Repo(_) | Node::Tarball(_) => keys.contains(&key),
            // Indirect nodes must additionally resolve to an id equal to the key.
            Node::Indirect(indirect) => keys.contains(&key) && indirect.original.id == key,
            // NOTE: it's unclear that a path node for Nixpkgs should be accepted
            _ => false,
        };

        if matches_key {
            deps.insert(key, node);
        }
    }

    // Collect every requested key that wasn't found so they can all be
    // reported in a single error message.
    let missing: Vec<String> = keys
        .iter()
        .filter(|k| !deps.contains_key(*k))
        .map(String::from)
        .collect();

    if !missing.is_empty() {
        let error_msg = format!(
            "no nixpkgs dependency found for specified {}: {}",
            if missing.len() > 1 { "keys" } else { "key" },
            missing.join(", ")
        );
        return Err(FlakeCheckerError::Invalid(error_msg));
    }

    Ok(deps)
}

/// Run the standard checks (supported ref, staleness, upstream owner) over the
/// Nixpkgs inputs selected by `config.nixpkgs_keys`, accumulating one `Issue`
/// per violation.
pub(crate) fn check_flake_lock(
    flake_lock: &FlakeLock,
    config: &FlakeCheckConfig,
    allowed_refs: Vec<String>,
) -> Result<Vec<Issue>, FlakeCheckerError> {
    let mut issues = vec![];

    let deps = nixpkgs_deps(flake_lock, &config.nixpkgs_keys)?;

    for (name, node) in deps {
        // Pull out the fields each check needs; tarball nodes only carry a
        // timestamp, and other node kinds provide nothing checkable.
        let (git_ref, last_modified, owner) = match node {
            Node::Repo(repo) => (
                repo.original.git_ref,
                Some(repo.locked.last_modified),
                Some(repo.original.owner),
            ),
            Node::Tarball(tarball) => (None, tarball.locked.last_modified, None),
            _ => (None, None, None),
        };

        // Check that the input's Git ref is explicitly supported.
        if let Some(git_ref) = git_ref {
            if config.check_supported && !allowed_refs.contains(&git_ref) {
                issues.push(Issue {
                    input: name.clone(),
                    kind: IssueKind::Disallowed(Disallowed { reference: git_ref }),
                });
            }
        }

        // Check that the input was modified within the last MAX_DAYS days.
        if let Some(last_modified) = last_modified {
            if config.check_outdated {
                let num_days_old = num_days_old(last_modified);

                if num_days_old > MAX_DAYS {
                    issues.push(Issue {
                        input: name.clone(),
                        kind: IssueKind::Outdated(Outdated { num_days_old }),
                    });
                }
            }
        }

        // Check that the GitHub owner is NixOS (case-insensitively).
        if let Some(owner) = owner {
            if config.check_owner && owner.to_lowercase() != "nixos" {
                issues.push(Issue {
                    input: name.clone(),
                    kind: IssueKind::NonUpstream(NonUpstream { owner }),
                });
            }
        }
    }
    Ok(issues)
}

/// Whole days elapsed between the given Unix timestamp and now.
pub(super) fn num_days_old(timestamp: i64) -> i64 {
    let elapsed_secs = Utc::now().timestamp() - timestamp;
    Duration::seconds(elapsed_secs).num_days()
}

#[cfg(test)]
mod test {
    use std::collections::BTreeMap;
    use std::path::PathBuf;

    use crate::{
        FlakeCheckConfig, FlakeLock, check_flake_lock,
        condition::evaluate_condition,
        issue::{Disallowed, Issue, IssueKind, NonUpstream},
        supported_refs,
    };

    // CEL policy evaluation against tests/flake.cel.0.lock: a passing
    // condition yields no issues, a failing one yields at least one.
    #[test]
    fn cel_conditions() {
        // (condition, expected)
        // NOTE(review): the second and third cases are byte-identical; one of
        // them was probably meant to exercise a different condition — verify.
        let cases: Vec<(&str, bool)> = vec![
            (include_str!("../tests/cel-condition.cel"), true),
            (
                "has(gitRef) && has(numDaysOld) && has(owner) && has(supportedRefs) && supportedRefs.contains(gitRef) && owner != 'NixOS'",
                false,
            ),
            (
                "has(gitRef) && has(numDaysOld) && has(owner) && has(supportedRefs) && supportedRefs.contains(gitRef) && owner != 'NixOS'",
                false,
            ),
        ];

        let ref_statuses: BTreeMap<String, String> =
            serde_json::from_str(include_str!("../ref-statuses.json")).unwrap();
        let supported_refs = supported_refs(ref_statuses.clone());
        let path = PathBuf::from("tests/flake.cel.0.lock");

        for (condition, expected) in cases {
            let flake_lock = FlakeLock::new(&path).unwrap();
            let config = FlakeCheckConfig {
                nixpkgs_keys: vec![String::from("nixpkgs")],
                ..Default::default()
            };

            let result = evaluate_condition(
                &flake_lock,
                &config.nixpkgs_keys,
                condition,
                ref_statuses.clone(),
                supported_refs.clone(),
            );

            if expected {
                println!("{result:?}");

                assert!(result.is_ok());
                assert!(result.unwrap().is_empty());
            } else {
                assert!(!result.unwrap().is_empty());
            }
        }
    }

    // Each of the known-good lock fixtures must produce zero issues
    // (staleness is disabled so the fixtures don't rot over time).
    #[test]
    fn clean_flake_locks() {
        let ref_statuses: BTreeMap<String, String> =
            serde_json::from_str(include_str!("../ref-statuses.json")).unwrap();
        let allowed_refs = supported_refs(ref_statuses);
        for n in 0..=7 {
            let path = PathBuf::from(format!("tests/flake.clean.{n}.lock"));
            let flake_lock = FlakeLock::new(&path).unwrap();
            let config = FlakeCheckConfig {
                check_outdated: false,
                ..Default::default()
            };
            let issues = check_flake_lock(&flake_lock, &config, allowed_refs.clone())
                .unwrap_or_else(|_| panic!("couldn't run check_flake_lock function in {path:?}"));
            assert!(
                issues.is_empty(),
                "expected clean flake.lock in tests/flake.clean.{n}.lock but encountered an issue"
            );
        }
    }

    // Known-bad fixtures must produce exactly the expected disallowed-ref and
    // non-upstream-owner issues, in order.
    #[test]
    fn dirty_flake_locks() {
        let ref_statuses: BTreeMap<String, String> =
            serde_json::from_str(include_str!("../ref-statuses.json")).unwrap();
        let allowed_refs = supported_refs(ref_statuses);
        let cases: Vec<(&str, Vec<Issue>)> = vec![
            (
                "flake.dirty.0.lock",
                vec![
                    Issue {
                        input: String::from("nixpkgs"),
                        kind: IssueKind::Disallowed(Disallowed {
                            reference: String::from("this-should-fail"),
                        }),
                    },
                    Issue {
                        input: String::from("nixpkgs"),
                        kind: IssueKind::NonUpstream(NonUpstream {
                            owner: String::from("bitcoin-miner-org"),
                        }),
                    },
                ],
            ),
            (
                "flake.dirty.1.lock",
                vec![
                    Issue {
                        input: String::from("nixpkgs"),
                        kind: IssueKind::Disallowed(Disallowed {
                            reference: String::from("probably-nefarious"),
                        }),
                    },
                    Issue {
                        input: String::from("nixpkgs"),
                        kind: IssueKind::NonUpstream(NonUpstream {
                            owner: String::from("pretty-shady"),
                        }),
                    },
                ],
            ),
        ];

        for (file, expected_issues) in cases {
            let path = PathBuf::from(format!("tests/{file}"));
            let flake_lock = FlakeLock::new(&path).unwrap();
            let config = FlakeCheckConfig {
                check_outdated: false,
                ..Default::default()
            };
            let issues = check_flake_lock(&flake_lock, &config, allowed_refs.clone()).unwrap();
            dbg!(&path);
            assert_eq!(issues, expected_issues);
        }
    }

    // With multiple nixpkgs keys configured, only the offending key's input
    // should be flagged.
    #[test]
    fn explicit_nixpkgs_keys() {
        let ref_statuses: BTreeMap<String, String> =
            serde_json::from_str(include_str!("../ref-statuses.json")).unwrap();
        let allowed_refs = supported_refs(ref_statuses);
        let cases: Vec<(&str, Vec<String>, Vec<Issue>)> = vec![(
            "flake.explicit-keys.0.lock",
            vec![String::from("nixpkgs"), String::from("nixpkgs-alt")],
            vec![Issue {
                input: String::from("nixpkgs-alt"),
                kind: IssueKind::NonUpstream(NonUpstream {
                    owner: String::from("seems-pretty-shady"),
                }),
            }],
        )];

        for (file, nixpkgs_keys, expected_issues) in cases {
            let path = PathBuf::from(format!("tests/{file}"));
            let flake_lock = FlakeLock::new(&path).unwrap();
            let config = FlakeCheckConfig {
                check_outdated: false,
                nixpkgs_keys,
                ..Default::default()
            };
            let issues = check_flake_lock(&flake_lock, &config, allowed_refs.clone()).unwrap();
            assert_eq!(issues, expected_issues);
        }
    }

    // Keys with no matching lock-file entry must fail with an Invalid error
    // listing every missing key (singular/plural message checked too).
    #[test]
    fn missing_nixpkgs_keys() {
        let ref_statuses: BTreeMap<String, String> =
            serde_json::from_str(include_str!("../ref-statuses.json")).unwrap();
        let allowed_refs = supported_refs(ref_statuses);
        let cases: Vec<(&str, Vec<String>, String)> = vec![
            (
                "flake.clean.0.lock",
                vec![
                    String::from("nixpkgs"),
                    String::from("foo"),
                    String::from("bar"),
                ],
                String::from(
                    "invalid flake.lock: no nixpkgs dependency found for specified keys: foo, bar",
                ),
            ),
            (
                "flake.clean.1.lock",
                vec![String::from("nixpkgs"), String::from("nixpkgs-other")],
                String::from(
                    "invalid flake.lock: no nixpkgs dependency found for specified key: nixpkgs-other",
                ),
            ),
        ];
        for (file, nixpkgs_keys, expected_err) in cases {
            let path = PathBuf::from(format!("tests/{file}"));
            let flake_lock = FlakeLock::new(&path).unwrap();
            let config = FlakeCheckConfig {
                check_outdated: false,
                nixpkgs_keys,
                ..Default::default()
            };

            let result = check_flake_lock(&flake_lock, &config, allowed_refs.clone());

            assert!(result.is_err());
            assert_eq!(result.unwrap_err().to_string(), expected_err);
        }
    }
}


================================================
FILE: src/issue.rs
================================================
use serde::Serialize;

/// A single problem found with one flake input.
#[derive(Clone, Debug, PartialEq, Serialize)]
pub(crate) struct Issue {
    /// The flake.lock input key the issue applies to (e.g. "nixpkgs").
    pub input: String,
    pub kind: IssueKind,
}

/// The category of a detected issue.
// `untagged`: variants serialize as their bare payload, with no kind label.
#[derive(Clone, Debug, PartialEq, Serialize)]
#[serde(untagged)]
pub(crate) enum IssueKind {
    Disallowed(Disallowed),
    Outdated(Outdated),
    NonUpstream(NonUpstream),
    // Produced when a user-supplied CEL condition evaluates to false.
    Violation,
}

/// Payload for a disallowed-Git-ref issue.
#[derive(Clone, Debug, PartialEq, Serialize)]
pub(crate) struct Disallowed {
    /// The unsupported Git ref that was found.
    pub(crate) reference: String,
}

/// Payload for an outdated-input issue.
#[derive(Clone, Debug, PartialEq, Serialize)]
pub(crate) struct Outdated {
    /// Age of the input in whole days.
    pub(crate) num_days_old: i64,
}

/// Payload for a non-upstream-owner issue.
#[derive(Clone, Debug, PartialEq, Serialize)]
pub(crate) struct NonUpstream {
    /// The GitHub owner that isn't "NixOS".
    pub(crate) owner: String,
}

impl IssueKind {
    /// True for `Disallowed` issues (unsupported Git ref).
    pub(crate) fn is_disallowed(&self) -> bool {
        matches!(self, Self::Disallowed(_))
    }

    /// True for `Outdated` issues (input older than the allowed age).
    pub(crate) fn is_outdated(&self) -> bool {
        matches!(self, Self::Outdated(_))
    }

    /// True for `NonUpstream` issues (owner other than NixOS).
    pub(crate) fn is_non_upstream(&self) -> bool {
        matches!(self, Self::NonUpstream(_))
    }

    /// True for CEL condition violations.
    pub(crate) fn is_violation(&self) -> bool {
        matches!(self, Self::Violation)
    }
}


================================================
FILE: src/main.rs
================================================
mod condition;
mod error;
mod flake;
mod issue;
mod summary;

#[cfg(feature = "ref-statuses")]
mod ref_statuses;

use std::collections::BTreeMap;
use std::path::PathBuf;
use std::process::ExitCode;

use clap::Parser;
use parse_flake_lock::FlakeLock;
use tracing_subscriber::{EnvFilter, fmt, prelude::*};

use crate::condition::evaluate_condition;
use error::FlakeCheckerError;
use flake::{FlakeCheckConfig, check_flake_lock};
use summary::Summary;

/// A flake.lock checker for Nix projects.
// NOTE: the `///` doc comments below double as the clap-generated `--help`
// text, so they are user-facing strings — edit with care.
#[cfg(not(feature = "ref-statuses"))]
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Don't send aggregate sums of each issue type.
    ///
    /// See <https://github.com/determinateSystems/flake-checker>.
    #[arg(long, env = "NIX_FLAKE_CHECKER_NO_TELEMETRY", default_value_t = false)]
    no_telemetry: bool,

    /// Check for outdated Nixpkgs inputs.
    #[arg(long, env = "NIX_FLAKE_CHECKER_CHECK_OUTDATED", default_value_t = true)]
    check_outdated: bool,

    /// Check that Nixpkgs inputs have "NixOS" as the GitHub owner.
    #[arg(long, env = "NIX_FLAKE_CHECKER_CHECK_OWNER", default_value_t = true)]
    check_owner: bool,

    /// Check that Git refs for Nixpkgs inputs are supported.
    #[arg(
        long,
        env = "NIX_FLAKE_CHECKER_CHECK_SUPPORTED",
        default_value_t = true
    )]
    check_supported: bool,

    /// Ignore a missing flake.lock file.
    #[arg(
        long,
        env = "NIX_FLAKE_CHECKER_IGNORE_MISSING_FLAKE_LOCK",
        default_value_t = true
    )]
    ignore_missing_flake_lock: bool,

    /// The path to the flake.lock file to check.
    // Positional argument (no `long`/`short`); defaults to ./flake.lock.
    #[arg(
        env = "NIX_FLAKE_CHECKER_FLAKE_LOCK_PATH",
        default_value = "flake.lock"
    )]
    flake_lock_path: PathBuf,

    /// Fail with an exit code of 1 if any issues are encountered.
    #[arg(
        long,
        short,
        env = "NIX_FLAKE_CHECKER_FAIL_MODE",
        default_value_t = false
    )]
    fail_mode: bool,

    /// Nixpkgs input keys as a comma-separated list.
    #[arg(
        long,
        short,
        env = "NIX_FLAKE_CHECKER_NIXPKGS_KEYS",
        default_value = "nixpkgs",
        value_delimiter = ',',
        name = "KEY_LIST"
    )]
    nixpkgs_keys: Vec<String>,

    /// Display Markdown summary (in GitHub Actions).
    #[arg(
        long,
        short,
        env = "NIX_FLAKE_CHECKER_MARKDOWN_SUMMARY",
        default_value_t = true
    )]
    markdown_summary: bool,

    /// The Common Expression Language (CEL) policy to apply to each Nixpkgs input.
    // When set, main() uses evaluate_condition instead of the standard checks.
    #[arg(long, short, env = "NIX_FLAKE_CHECKER_CONDITION")]
    condition: Option<String>,
}

/// Extract the channel names whose status is "rolling", "stable", or
/// "deprecated" from the channel-status map, sorted ascending.
///
/// Consumes the map and moves the keys out instead of cloning them; because
/// `BTreeMap` iterates in ascending key order, the result is already sorted
/// and the explicit `sort()` the original performed is unnecessary.
#[cfg(not(feature = "ref-statuses"))]
pub(crate) fn supported_refs(ref_statuses: BTreeMap<String, String>) -> Vec<String> {
    ref_statuses
        .into_iter()
        .filter(|(_, status)| matches!(status.as_str(), "rolling" | "stable" | "deprecated"))
        .map(|(channel, _)| channel)
        .collect()
}

/// Default-build entry point: parse CLI flags, check the flake.lock, report
/// telemetry, and print a summary. Returns FAILURE only for a missing
/// (non-ignored) lockfile or for issues found while in fail mode.
#[cfg(not(feature = "ref-statuses"))]
#[tokio::main]
async fn main() -> Result<ExitCode, FlakeCheckerError> {
    // Route `tracing` output through env-based filtering (e.g. RUST_LOG).
    tracing_subscriber::registry()
        .with(fmt::layer())
        .with(EnvFilter::from_default_env())
        .init();

    // Channel-status table vendored into the binary at compile time.
    let ref_statuses: BTreeMap<String, String> =
        serde_json::from_str(include_str!("../ref-statuses.json")).unwrap();

    let Cli {
        no_telemetry,
        check_outdated,
        check_owner,
        check_supported,
        ignore_missing_flake_lock,
        flake_lock_path,
        fail_mode,
        nixpkgs_keys,
        markdown_summary,
        condition,
    } = Cli::parse();

    // Telemetry reporter; reporting is disabled entirely via --no-telemetry.
    let (reporter, worker) = detsys_ids_client::builder!()
        .enable_reporting(!no_telemetry)
        .fact("check_owner", check_owner)
        .fact("check_outdated", check_outdated)
        .fact("check_supported", check_supported)
        .fact("ignore_missing_flake_lock", ignore_missing_flake_lock)
        .fact("flake_lock_path", flake_lock_path.to_string_lossy())
        .fact("fail_mode", fail_mode)
        .fact("condition", condition.as_deref())
        .build_or_default()
        .await;

    // A missing lockfile is tolerated (SUCCESS) unless the user opted out.
    if !flake_lock_path.exists() {
        if ignore_missing_flake_lock {
            println!("no flake lockfile found at {flake_lock_path:?}; ignoring");
            return Ok(ExitCode::SUCCESS);
        } else {
            println!("no flake lockfile found at {flake_lock_path:?}");
            return Ok(ExitCode::FAILURE);
        }
    }

    let flake_lock = FlakeLock::new(&flake_lock_path)?;

    let flake_check_config = FlakeCheckConfig {
        check_supported,
        check_outdated,
        check_owner,
        nixpkgs_keys: nixpkgs_keys.clone(),
        fail_mode,
    };

    let allowed_refs = supported_refs(ref_statuses.clone());

    // A CEL condition, when supplied, replaces the standard checks entirely.
    let issues = if let Some(condition) = &condition {
        evaluate_condition(
            &flake_lock,
            &nixpkgs_keys,
            condition,
            ref_statuses,
            allowed_refs.clone(),
        )?
    } else {
        check_flake_lock(&flake_lock, &flake_check_config, allowed_refs.clone())?
    };

    // Record only aggregate per-category counts, never input names or refs.
    reporter
        .record(
            "flake_issues",
            Some(detsys_ids_client::Map::from_iter([
                (
                    "disallowed".into(),
                    issues
                        .iter()
                        .filter(|issue| issue.kind.is_disallowed())
                        .count()
                        .into(),
                ),
                (
                    "outdated".into(),
                    issues
                        .iter()
                        .filter(|issue| issue.kind.is_outdated())
                        .count()
                        .into(),
                ),
                (
                    "non_upstream".into(),
                    issues
                        .iter()
                        .filter(|issue| issue.kind.is_non_upstream())
                        .count()
                        .into(),
                ),
            ])),
        )
        .await;

    let summary = Summary::new(
        &issues,
        flake_lock_path,
        flake_check_config,
        allowed_refs,
        condition,
    );

    // GITHUB_ACTIONS selects Markdown + console-error output; otherwise plain text.
    if std::env::var("GITHUB_ACTIONS").is_ok() {
        if markdown_summary {
            summary.generate_markdown()?;
        }
        summary.console_log_errors()?;
    } else {
        summary.generate_text()?;
    }

    // NOTE(review): presumably drops the reporter handle so the worker can
    // drain queued telemetry before exit — verify against detsys_ids_client docs.
    drop(reporter);
    worker.wait().await;

    if fail_mode && !issues.is_empty() {
        return Ok(ExitCode::FAILURE);
    }

    Ok(ExitCode::SUCCESS)
}

// Alternate CLI used only by the `ref-statuses` maintenance build.
#[cfg(feature = "ref-statuses")]
#[derive(Parser)]
struct Cli {
    // Compare the vendored ref-statuses.json against the currently published statuses.
    #[arg(long, hide = true)]
    check_ref_statuses: bool,

    // Fetch and print the currently published ref statuses as JSON.
    #[arg(long, hide = true)]
    get_ref_statuses: bool,
}

/// Entry point for the `ref-statuses` maintenance build: fetches the current
/// Nixpkgs channel statuses (--get-ref-statuses) and/or compares them against
/// the vendored ref-statuses.json (--check-ref-statuses).
///
/// Panics when neither flag is given; a fetch/check failure yields FAILURE.
#[cfg(feature = "ref-statuses")]
fn main() -> Result<ExitCode, FlakeCheckerError> {
    let Cli {
        check_ref_statuses,
        get_ref_statuses,
    } = Cli::parse();

    if !get_ref_statuses && !check_ref_statuses {
        panic!("You must select either --get-ref-statuses or --check-ref-statuses");
    }

    // --get-ref-statuses short-circuits: it prints and exits, so the check
    // below never runs in the same invocation.
    if get_ref_statuses {
        match ref_statuses::fetch_ref_statuses() {
            Ok(refs) => {
                let json_refs = serde_json::to_string(&refs)?;
                println!("{json_refs}");
                return Ok(ExitCode::SUCCESS);
            }
            Err(e) => {
                println!("Error fetching ref statuses: {e}");
                return Ok(ExitCode::FAILURE);
            }
        }
    }

    if check_ref_statuses {
        // The map is only read for comparison, so no `mut` is needed.
        let ref_statuses: BTreeMap<String, String> =
            serde_json::from_str(include_str!("../ref-statuses.json")).unwrap();

        match ref_statuses::check_ref_statuses(ref_statuses) {
            Ok(equals) => {
                if equals {
                    println!("The reference statuses sets are up to date.");
                    return Ok(ExitCode::SUCCESS);
                } else {
                    println!(
                        "The reference statuses sets are NOT up to date. Make sure to update."
                    );
                    return Ok(ExitCode::FAILURE);
                }
            }
            Err(e) => {
                println!("Error checking ref statuses: {e}");
                return Ok(ExitCode::FAILURE);
            }
        }
    }

    Ok(ExitCode::SUCCESS)
}


================================================
FILE: src/ref_statuses.rs
================================================
use crate::error::FlakeCheckerError;

use serde::Deserialize;

use std::collections::BTreeMap;

// Prometheus query whose result labels carry each Nixpkgs channel's status.
// NOTE(review): the name says "allowed refs" but the endpoint yields ref
// *statuses*; consider renaming alongside its call site.
const ALLOWED_REFS_URL: &str = "https://prometheus.nixos.org/api/v1/query?query=channel_revision";

// Minimal mirror of the Prometheus JSON response: only the fields we read.
#[derive(Deserialize)]
struct Response {
    data: Data,
}

#[derive(Deserialize)]
struct Data {
    result: Vec<DataResult>,
}

#[derive(Deserialize)]
struct DataResult {
    metric: Metric,
}

// Labels attached to each `channel_revision` metric sample.
#[derive(Deserialize)]
struct Metric {
    channel: String,
    status: String,
}

/// Report whether the given (vendored) ref statuses match what the NixOS
/// Prometheus endpoint currently publishes.
pub(crate) fn check_ref_statuses(
    ref_statuses: BTreeMap<String, String>,
) -> Result<bool, FlakeCheckerError> {
    let current = fetch_ref_statuses()?;
    Ok(current == ref_statuses)
}

/// Fetch the current channel → status map from the NixOS Prometheus endpoint.
///
/// Network and deserialization failures surface as `Http` errors via `?`.
pub(crate) fn fetch_ref_statuses() -> Result<BTreeMap<String, String>, FlakeCheckerError> {
    // `into_iter` moves the strings out of the response instead of cloning
    // them, and the binding never needed to be `mut`.
    let officially_supported: BTreeMap<String, String> =
        reqwest::blocking::get(ALLOWED_REFS_URL)?
            .json::<Response>()?
            .data
            .result
            .into_iter()
            .map(|res| (res.metric.channel, res.metric.status))
            .collect();

    Ok(officially_supported)
}


================================================
FILE: src/summary.rs
================================================
use crate::FlakeCheckConfig;
use crate::error::FlakeCheckerError;
use crate::flake::MAX_DAYS;
use crate::issue::{Issue, IssueKind};

use std::fs::OpenOptions;
use std::io::Write;
use std::path::PathBuf;

use handlebars::Handlebars;
use serde_json::json;

// Handlebars templates embedded at compile time. "cel" templates render
// results of a user-supplied CEL condition; "standard" templates render the
// built-in checks. Each comes in Markdown (GitHub Actions) and plain-text form.
static CEL_MARKDOWN_TEMPLATE: &str = include_str!(concat!(
    env!("CARGO_MANIFEST_DIR"),
    "/src/templates/summary.cel.md.hbs"
));

static CEL_TEXT_TEMPLATE: &str = include_str!(concat!(
    env!("CARGO_MANIFEST_DIR"),
    "/src/templates/summary.cel.txt.hbs"
));

static STANDARD_MARKDOWN_TEMPLATE: &str = include_str!(concat!(
    env!("CARGO_MANIFEST_DIR"),
    "/src/templates/summary.standard.md.hbs"
));

static STANDARD_TEXT_TEMPLATE: &str = include_str!(concat!(
    env!("CARGO_MANIFEST_DIR"),
    "/src/templates/summary.standard.txt.hbs"
));

/// Collected check results plus the context needed to render them.
pub(crate) struct Summary {
    pub issues: Vec<Issue>,
    // Pre-built JSON context handed to the Handlebars templates.
    data: serde_json::Value,
    flake_lock_path: PathBuf,
    flake_check_config: FlakeCheckConfig,
    // Set when a CEL condition was used; presumably selects the CEL
    // templates over the standard ones — verify in the generate_* methods.
    condition: Option<String>,
}

impl Summary {
    /// Builds a `Summary` from the issues found during a scan.
    ///
    /// The template context (`data`) is computed eagerly here: when a CEL
    /// `condition` is supplied, it lists the inputs that violate it;
    /// otherwise it buckets issues into disallowed/outdated/non-upstream
    /// groups for the standard templates.
    ///
    /// Note: takes `&[Issue]` rather than `&Vec<Issue>` (clippy::ptr_arg);
    /// existing `&Vec<Issue>` call sites still work via deref coercion.
    pub(crate) fn new(
        issues: &[Issue],
        flake_lock_path: PathBuf,
        flake_check_config: FlakeCheckConfig,
        allowed_refs: Vec<String>,
        condition: Option<String>,
    ) -> Self {
        let num_issues = issues.len();
        let clean = issues.is_empty();
        // Singular/plural form for the templates; reuse num_issues rather
        // than recomputing the length.
        let issue_word = if num_issues == 1 { "issue" } else { "issues" };

        let data = if let Some(condition) = &condition {
            // CEL mode: only Violation-kind issues are relevant; report the
            // offending input names.
            let inputs_with_violations: Vec<String> = issues
                .iter()
                .filter(|i| i.kind.is_violation())
                .map(|i| i.input.to_owned())
                .collect();

            json!({
                "issues": issues,
                "num_issues": num_issues,
                "clean": clean,
                "dirty": !clean,
                "issue_word": issue_word,
                "condition": condition,
                "inputs_with_violations": inputs_with_violations,
            })
        } else {
            // Standard mode: bucket issues by kind for the template sections.
            let disallowed: Vec<&Issue> =
                issues.iter().filter(|i| i.kind.is_disallowed()).collect();
            let outdated: Vec<&Issue> = issues.iter().filter(|i| i.kind.is_outdated()).collect();
            let non_upstream: Vec<&Issue> =
                issues.iter().filter(|i| i.kind.is_non_upstream()).collect();

            json!({
                "issues": issues,
                "num_issues": num_issues,
                "clean": clean,
                "dirty": !clean,
                "issue_word": issue_word,
                // Disallowed refs
                "has_disallowed": !disallowed.is_empty(),
                "disallowed": disallowed,
                // Outdated refs
                "has_outdated": !outdated.is_empty(),
                "outdated": outdated,
                // Non-upstream refs
                "has_non_upstream": !non_upstream.is_empty(),
                "non_upstream": non_upstream,
                // Constants
                "max_days": MAX_DAYS,
                "supported_ref_names": allowed_refs,
            })
        };

        Self {
            issues: issues.to_vec(),
            data,
            flake_lock_path,
            flake_check_config,
            condition,
        }
    }

    /// Prints every issue to stdout, one line per issue.
    ///
    /// In CEL mode this lists the violating inputs; in standard mode each
    /// issue is printed at `ERROR`/`WARNING` level depending on whether
    /// `fail_mode` is set, and issues whose corresponding check is disabled
    /// in the config are suppressed.
    pub fn console_log_errors(&self) -> Result<(), FlakeCheckerError> {
        let file = self.flake_lock_path.to_string_lossy();

        if self.issues.is_empty() {
            println!("The Determinate Nix Flake Checker scanned {file} and found no issues");
            return Ok(());
        }

        if let Some(condition) = &self.condition {
            println!("You supplied this CEL condition for your flake:\n\n{condition}");
            println!("The following inputs violate that condition:\n");
            for issue in self.issues.iter() {
                println!("* {}", issue.input);
            }
        } else {
            // fail_mode upgrades the reported severity; it doesn't change
            // which issues are shown.
            let level = if self.flake_check_config.fail_mode {
                "error"
            } else {
                "warning"
            };

            for issue in self.issues.iter() {
                let input = &issue.input;

                // None means the relevant check is disabled, so the issue is
                // silently skipped.
                let message: Option<String> = match &issue.kind {
                    IssueKind::Disallowed(disallowed) => {
                        if self.flake_check_config.check_supported {
                            let reference = &disallowed.reference;
                            Some(format!(
                                "the `{input}` input uses the non-supported Git branch `{reference}` for Nixpkgs"
                            ))
                        } else {
                            None
                        }
                    }
                    IssueKind::Outdated(outdated) => {
                        if self.flake_check_config.check_outdated {
                            let num_days_old = outdated.num_days_old;
                            Some(format!(
                                "the `{input}` input is {num_days_old} days old (the max allowed is {MAX_DAYS})"
                            ))
                        } else {
                            None
                        }
                    }
                    IssueKind::NonUpstream(non_upstream) => {
                        if self.flake_check_config.check_owner {
                            let owner = &non_upstream.owner;
                            Some(format!(
                                "the `{input}` input has the non-upstream owner `{owner}` rather than `NixOS` (upstream)"
                            ))
                        } else {
                            None
                        }
                    }
                    IssueKind::Violation => Some(String::from("policy violation")),
                };

                if let Some(message) = message {
                    println!("{}: {}", level.to_uppercase(), message);
                }
            }
        }
        Ok(())
    }

    /// Registers `template` under `name` and renders it against `self.data`.
    ///
    /// Shared by `generate_markdown` and `generate_text`, which previously
    /// duplicated this Handlebars boilerplate.
    fn render(&self, name: &str, template: &str) -> Result<String, FlakeCheckerError> {
        let mut handlebars = Handlebars::new();
        handlebars
            .register_template_string(name, template)
            .map_err(Box::new)?;
        Ok(handlebars.render(name, &self.data)?)
    }

    /// Renders the Markdown summary and appends it to the file named by the
    /// `GITHUB_STEP_SUMMARY` environment variable (the GitHub Actions job
    /// summary). Errors if that variable is unset or the file can't be
    /// written.
    pub fn generate_markdown(&self) -> Result<(), FlakeCheckerError> {
        let template = if self.condition.is_some() {
            CEL_MARKDOWN_TEMPLATE
        } else {
            STANDARD_MARKDOWN_TEMPLATE
        };

        let summary_md = self.render("summary.md", template)?;

        let summary_md_filepath = std::env::var("GITHUB_STEP_SUMMARY")?;
        // Append (and create if missing) so we don't clobber summary content
        // written by earlier workflow steps.
        let mut summary_md_file = OpenOptions::new()
            .append(true)
            .create(true)
            .open(summary_md_filepath)?;
        summary_md_file.write_all(summary_md.as_bytes())?;

        Ok(())
    }

    /// Renders the plain-text summary and prints it to stdout.
    pub fn generate_text(&self) -> Result<(), FlakeCheckerError> {
        let template = if self.condition.is_some() {
            CEL_TEXT_TEMPLATE
        } else {
            STANDARD_TEXT_TEMPLATE
        };

        let summary_txt = self.render("summary.txt", template)?;

        print!("{summary_txt}");

        Ok(())
    }
}


================================================
FILE: src/templates/summary.cel.md.hbs
================================================
# ![](https://avatars.githubusercontent.com/u/80991770?s=30) Flake checkup

{{#if clean}}
The Determinate Flake Checker Action scanned your `flake.lock` and didn't identify any issues.
All Nixpkgs inputs conform to the flake policy expressed in your supplied [Common Expression Language](https://cel.dev) condition.
{{/if}}

{{#if dirty}}
⚠️ The Determinate Flake Checker Action scanned your `flake.lock` and discovered {{num_issues}} {{issue_word}} that we recommend looking into.
You supplied this CEL condition:

```ruby
{{condition}}
```

The following inputs violate that condition:

{{#each inputs_with_violations}}
* `{{this}}`
{{/each}}
{{/if}}

<p>Feedback? Let us know at <a href="https://github.com/DeterminateSystems/flake-checker">DeterminateSystems/flake-checker</a>.</p>


================================================
FILE: src/templates/summary.cel.txt.hbs
================================================
Flake checker results:

{{#if clean}}
The flake checker scanned your flake.lock and didn't identify any issues. You specified this CEL
condition:

{{{condition}}}

All Nixpkgs inputs satisfy this condition.
{{/if}}
{{#if dirty}}
The flake checker scanned your flake.lock and discovered {{num_issues}} {{issue_word}}
that we recommend looking into. Here are the inputs that violate your supplied
condition:

{{#each inputs_with_violations}}
* {{this}}
{{/each}}
{{/if}}

================================================
FILE: src/templates/summary.standard.md.hbs
================================================
# ![](https://avatars.githubusercontent.com/u/80991770?s=30) Flake checkup

{{#if clean}}
The Determinate Flake Checker Action scanned your `flake.lock` and didn't identify any issues. All Nixpkgs inputs:

✅ Use supported branches
✅ Are less than 30 days old
✅ Use upstream Nixpkgs
{{/if}}
{{#if dirty}}
⚠️ The Determinate Flake Checker Action scanned your `flake.lock` and discovered {{num_issues}} {{issue_word}} that we recommend looking into.

{{#if has_disallowed}}
## Non-supported Git branches for Nixpkgs

{{#each disallowed}}
* The `{{this.input}}` input uses the `{{this.kind.reference}}` branch
{{/each}}

<details>
<summary>What to do 🧰</summary>
<p>Use one of these branches instead:</p>

{{#each supported_ref_names}}
* `{{this}}`
{{/each}}

<p>Here's an example:</p>

```nix
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
}
```
</details>

<details>
<summary>Why it's important to use supported branches 📚</summary>
<a href="https://zero-to-nix.com/concepts/nixos">NixOS</a>'s release branches stop receiving updates roughly 7 months after release and then gradually become more and more insecure over time.
Non-release branches receive unpredictable updates and should be avoided as dependencies.
Release branches are also certain to have good <a href="https://zero-to-nix.com/concepts/caching">binary cache</a> coverage, which other branches can't promise.
</details>
{{/if}}

{{#if has_outdated}}
## Outdated Nixpkgs dependencies

{{#each outdated}}
* The `{{this.input}}` input is **{{this.kind.num_days_old}}** days old
{{/each}}

The maximum recommended age is **{{max_days}}** days.

<details>
<summary>What to do 🧰</summary>
<p>For a more automated approach, use the <a href="https://github.com/determinateSystems/update-flake-lock"><code>update-flake-lock</code></a>
GitHub Action to create pull requests to update your <code>flake.lock</code>. Here's an example Actions workflow:</p>

```yaml
steps:
  - name: Automatically update flake.lock
    uses: DeterminateSystems/update-flake-lock
    with:
      pr-title: "Update flake.lock"        # PR title
      pr-labels: [dependencies, automated] # PR labels
```

<p>For a more ad hoc approach, use the <a href="https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake-update.html"><code>nix flake update</code></a> utility:</p>

```shell
nix flake update
```
</details>

<details>
<summary>Why it's important to keep Nix dependencies up to date 📚</summary>
<a href="https://github.com/NixOS/nixpkgs">Nixpkgs</a> receives a continuous stream of security patches to keep your software and systems secure.
Using outdated revisions of Nixpkgs can inadvertently expose you to software security risks that have been resolved in more recent releases.
</details>
{{/if}}

{{#if has_non_upstream}}
## Non-upstream Nixpkgs dependencies

{{#each non_upstream}}
* The `{{this.input}}` input has `{{this.kind.owner}}` as an owner rather than the `NixOS` org
{{/each}}

<details>
<summary>What to do 🧰</summary>
<p>Use a Nixpkgs dependency from the <a href="https://github.com/nixos"><code>NixOS</code></a> org. Here's an example:</p>

```nix
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs";
}
```

<p>If you need a customized version of Nixpkgs, we recommend that you use
<a href="https://nixos.wiki/wiki/Overlays">overlays</a> and
per-package <a href="https://ryantm.github.io/nixpkgs/using/overrides">overrides</a>.</p>
</details>

<details>
<summary>Why it's important to use upstream Nixpkgs 📚</summary>
We don't recommend using forked or re-exported versions of Nixpkgs.
While this may be convenient in some cases, it can introduce unexpected behaviors and unwanted security risks.
While <a href="https://github.com/NixOS/nixpkgs">upstream Nixpkgs</a> isn't bulletproof&mdash;nothing in software is!&mdash;it has a wide range of security measures in place, most notably continuous integration testing with <a href="https://hydra.nixos.org/">Hydra</a>, that mitigate a great deal of supply chain risk.
</details>
{{/if}}
{{/if}}

<p>Feedback? Let us know at <a href="https://github.com/DeterminateSystems/flake-checker">DeterminateSystems/flake-checker</a>.</p>


================================================
FILE: src/templates/summary.standard.txt.hbs
================================================
Flake checker results:

{{#if clean}}
The flake checker scanned your flake.lock and didn't identify any issues. All
Nixpkgs inputs:

> Use supported branches
> Are less than 30 days old
> Use upstream Nixpkgs
{{/if}}
{{#if dirty}}
The flake checker scanned your flake.lock and discovered {{num_issues}} {{issue_word}}
that we recommend looking into:

{{#if has_disallowed}}
>>> Non-supported Git branches for Nixpkgs

{{#each disallowed}}
> The {{this.input}} input uses the {{this.kind.reference}} branch
{{/each}}

>> What to do

Use one of these branches instead:

{{#each supported_ref_names}}
* {{this}}
{{/each}}

>> Why it's important to use supported branches

NixOS's release branches stop receiving updates roughly 7 months after release
and then gradually become more and more insecure over time. Non-release branches
receive unpredictable updates and should be avoided as dependencies. Release
branches are also certain to have good binary cache coverage, which other
branches can't promise.
{{/if}}

{{#if has_outdated}}
>>> Outdated Nixpkgs dependencies

{{#each outdated}}
> The {{this.input}} input is {{this.kind.num_days_old}} days old
{{/each}}

The maximum recommended age is {{max_days}} days.

>> What to do

For a more automated approach, use the update-flake-lock GitHub Action to
create pull requests to update your flake.lock (if you're using GitHub Actions).

For a more ad hoc approach, use the nix flake update utility.

>> Why it's important to keep Nix dependencies up to date

Nixpkgs receives a continuous stream of security patches to keep your software
and systems secure. Using outdated revisions of Nixpkgs can inadvertently expose
you to software security risks that have been resolved in more recent releases.
{{/if}}

{{#if has_non_upstream}}
>>> Non-upstream Nixpkgs dependencies

{{#each non_upstream}}
> The {{this.input}} input has {{this.kind.owner}} as an owner rather
  than the NixOS org
{{/each}}

>> What to do

Use a Nixpkgs dependency from the NixOS org, such as github:NixOS/nixpkgs.

If you need a customized version of Nixpkgs, we recommend that you use overlays
and per-package overrides.

>> Why it's important to use upstream Nixpkgs

We don't recommend using forked or re-exported versions of Nixpkgs. While this
may be convenient in some cases, it can introduce unexpected behaviors and
unwanted security risks. While upstream Nixpkgs isn't bulletproof (nothing in
software is!) it has a wide range of security measures in place, most notably
continuous integration testing with Hydra, that mitigate a great deal of supply
chain risk.
{{/if}}
{{/if}}

================================================
FILE: templates/README.md.handlebars
================================================
# Nix Flake Checker

[![FlakeHub](https://img.shields.io/endpoint?url=https://flakehub.com/f/DeterminateSystems/flake-checker/badge)](https://flakehub.com/flake/DeterminateSystems/flake-checker)

**Nix Flake Checker** is a tool from [Determinate Systems][detsys] that performs "health" checks on the [`flake.lock`][lockfile] files in your [flake][flakes]-powered Nix projects.
Its goal is to help your Nix projects stay on recent and supported versions of [Nixpkgs].

To run the checker in the root of a Nix project:

```shell
nix run github:DeterminateSystems/flake-checker

# Or point to an explicit path for flake.lock
nix run github:DeterminateSystems/flake-checker /path/to/flake.lock
```

Nix Flake Checker looks at your `flake.lock`'s root-level [Nixpkgs] inputs.
There are two ways to express flake policies:

- Via [config parameters](#parameters).
- Via [policy conditions](#policy-conditions) using [Common Expression Language][cel] (CEL).

If you're running it locally, Nix Flake Checker reports any issues via text output in your terminal.
But you can also use Nix Flake Checker [in CI](#the-flake-checker-action).

## Supported branches

At any given time, [Nixpkgs] has a bounded set of branches that are considered _supported_.
The current list, with their statuses:

{{#each supported}}
- `{{@key}}`
{{/each}}

## Parameters

By default, Flake Checker verifies that:

- Any explicit Nixpkgs Git refs are in the [supported list](#supported-branches).
- Any Nixpkgs dependencies are less than 30 days old.
- Any Nixpkgs dependencies have the [`NixOS`][nixos-org] org as the GitHub owner (and thus that the dependency isn't a fork or non-upstream variant).

You can adjust this behavior via configuration (all are enabled by default but you can disable them):

| Flag                | Environment variable                | Action                                                     | Default |
| :------------------ | :---------------------------------- | :--------------------------------------------------------- | :------ |
| `--check-outdated`  | `NIX_FLAKE_CHECKER_CHECK_OUTDATED`  | Check for outdated Nixpkgs inputs                          | `true`  |
| `--check-owner`     | `NIX_FLAKE_CHECKER_CHECK_OWNER`     | Check that Nixpkgs inputs have `NixOS` as the GitHub owner | `true`  |
| `--check-supported` | `NIX_FLAKE_CHECKER_CHECK_SUPPORTED` | Check that Git refs for Nixpkgs inputs are supported       | `true`  |

## Policy conditions

You can apply a CEL condition to your flake using the `--condition` flag.
Here's an example:

```shell
flake-checker --condition "has(numDaysOld) && numDaysOld < 365"
```

This would check that each Nixpkgs input in your `flake.lock` is less than 365 days old.
These variables are available in each condition:

| Variable        | Description                                                                                                                              |
| :-------------- | :--------------------------------------------------------------------------------------------------------------------------------------- |
| `gitRef`        | The Git reference of the input.                                                                                                          |
| `numDaysOld`    | The number of days old the input is.                                                                                                     |
| `owner`         | The input's owner (if a GitHub input).                                                                                                   |
| `supportedRefs` | A list of [supported Git refs](#supported-branches) (all are branch names).                                                              |
| `refStatuses`   | A map. Each key is a branch name. Each value is a branch status (`"rolling"`, `"beta"`, `"stable"`, `"deprecated"` or `"unmaintained"`). |

We recommend a condition _at least_ this stringent:

```ruby
supportedRefs.contains(gitRef) && (has(numDaysOld) && numDaysOld < 30) && owner == 'NixOS'
```

Note that not all Nixpkgs inputs have a `numDaysOld` field, so make sure that field exists before checking the number of days.

Here are some other example conditions:

```ruby
# Updated in the last two weeks
supportedRefs.contains(gitRef) && (has(numDaysOld) && numDaysOld < 14) && owner == 'NixOS'

# Check for most recent stable Nixpkgs
gitRef.contains("24.05")
```

## The Nix Flake Checker Action

You can automate Nix Flake Checker by adding Determinate Systems' [Nix Flake Checker Action][action] to your GitHub Actions workflows:

```yaml
checks:
  steps:
    - uses: actions/checkout@v6
    - name: Check Nix flake Nixpkgs inputs
      uses: DeterminateSystems/flake-checker-action@main
```

When run in GitHub Actions, Nix Flake Checker always exits with a status code of 0 by default&mdash;and thus never fails your workflows&mdash;and reports its findings as a [Markdown summary][md].

## Telemetry

The goal of Nix Flake Checker is to help teams stay on recent and supported versions of Nixpkgs.
The flake checker collects a little bit of telemetry information to help us make that true.

To disable diagnostic reporting, set the diagnostics URL to an empty string by passing `--no-telemetry` or setting `FLAKE_CHECKER_NO_TELEMETRY=true`.

You can read the full privacy policy for [Determinate Systems][detsys], the creators of this tool and the [Determinate Nix Installer][installer], [here][privacy].

## Rust library

The Nix Flake Checker is written in [Rust].
This repo exposes a [`parse-flake-lock`](./parse-flake-lock) crate that you can use to parse [`flake.lock` files][lockfile] in your own Rust projects.
To add that dependency:

```toml
[dependencies]
parse-flake-lock = { git = "https://github.com/DeterminateSystems/flake-checker", branch = "main" }
```

Here's an example usage:

```rust
use std::path::Path;

use parse_flake_lock::{FlakeLock, FlakeLockParseError};

fn main() -> Result<(), FlakeLockParseError> {
    let flake_lock = FlakeLock::new(Path::new("flake.lock"))?;
    println!("flake.lock info:");
    println!("version: {version}", version=flake_lock.version);
    println!("root node: {root:?}", root=flake_lock.root);
    println!("all nodes: {nodes:?}", nodes=flake_lock.nodes);

    Ok(())
}
```

The `parse-flake-lock` crate doesn't yet exhaustively parse all input node types, instead using a "fallthrough" mechanism that parses input types that don't yet have explicit struct definitions to a [`serde_json::value::Value`][val].
If you'd like to help make the parser more exhaustive, [pull requests][prs] are quite welcome.

[action]: https://github.com/DeterminateSystems/flake-checker-action
[cel]: https://cel.dev
[detsys]: https://determinate.systems
[flakes]: https://zero-to-nix.com/concepts/flakes
[install]: https://zero-to-nix.com/start/install
[installer]: https://github.com/DeterminateSystems/nix-installer
[lockfile]: https://zero-to-nix.com/concepts/flakes#lockfile
[md]: https://github.blog/2022-05-09-supercharging-github-actions-with-job-summaries
[nixos-org]: https://github.com/NixOS
[nixpkgs]: https://github.com/NixOS/nixpkgs
[privacy]: https://determinate.systems/policies/privacy
[prs]: /pulls
[rust]: https://rust-lang.org
[telemetry]: https://github.com/DeterminateSystems/nix-flake-checker/blob/main/src/telemetry.rs#L29-L43
[val]: https://docs.rs/serde_json/latest/serde_json/value/enum.Value.html


================================================
FILE: tests/cel-condition.cel
================================================
['nixos-unstable', 'nixos-unstable-small', 'nixpkgs-unstable'].map(rev, supportedRefs.contains(rev))
    && owner == 'NixOS'
    && gitRef == 'nixos-unstable'
    && supportedRefs.contains(gitRef)
    && has(numDaysOld)
    && numDaysOld > 0
Download .txt
gitextract_m548giyf/

├── .cargo/
│   └── config.toml
├── .editorconfig
├── .envrc
├── .github/
│   └── workflows/
│       ├── build.yaml
│       ├── ci.yaml
│       ├── flakehub-publish-tagged.yaml
│       ├── ref-statuses.yaml
│       ├── release-branches.yaml
│       ├── release-prs.yaml
│       ├── release-tags.yaml
│       └── update-flake-lock.yaml
├── .gitignore
├── Cargo.toml
├── LICENSE
├── README.md
├── flake.nix
├── parse-flake-lock/
│   ├── Cargo.toml
│   └── src/
│       └── lib.rs
├── ref-statuses.json
├── src/
│   ├── condition.rs
│   ├── error.rs
│   ├── flake.rs
│   ├── issue.rs
│   ├── main.rs
│   ├── ref_statuses.rs
│   ├── summary.rs
│   └── templates/
│       ├── summary.cel.md.hbs
│       ├── summary.cel.txt.hbs
│       ├── summary.standard.md.hbs
│       └── summary.standard.txt.hbs
├── templates/
│   └── README.md.handlebars
└── tests/
    └── cel-condition.cel
Download .txt
SYMBOL INDEX (67 symbols across 8 files)

FILE: parse-flake-lock/src/lib.rs
  type FlakeLockParseError (line 23) | pub enum FlakeLockParseError {
  type FlakeLock (line 38) | pub struct FlakeLock {
    method deserialize (line 52) | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    method new (line 185) | pub fn new(path: &Path) -> Result<Self, FlakeLockParseError> {
  function chase_input_node (line 140) | fn chase_input_node(
  type Node (line 199) | pub enum Node {
    method variant (line 219) | fn variant(&self) -> &'static str {
  type Input (line 234) | pub enum Input {
  type RootNode (line 244) | pub struct RootNode {
  type RepoNode (line 253) | pub struct RepoNode {
  type RepoLocked (line 266) | pub struct RepoLocked {
  type RepoOriginal (line 286) | pub struct RepoOriginal {
  type IndirectNode (line 303) | pub struct IndirectNode {
  type IndirectOriginal (line 314) | pub struct IndirectOriginal {
  type PathNode (line 326) | pub struct PathNode {
  type PathLocked (line 337) | pub struct PathLocked {
  type PathOriginal (line 353) | pub struct PathOriginal {
  type TarballNode (line 366) | pub struct TarballNode {
  type TarballLocked (line 377) | pub struct TarballLocked {
  type TarballOriginal (line 393) | pub struct TarballOriginal {

FILE: src/condition.rs
  constant KEY_GIT_REF (line 12) | const KEY_GIT_REF: &str = "gitRef";
  constant KEY_NUM_DAYS_OLD (line 13) | const KEY_NUM_DAYS_OLD: &str = "numDaysOld";
  constant KEY_OWNER (line 14) | const KEY_OWNER: &str = "owner";
  constant KEY_REF_STATUSES (line 15) | const KEY_REF_STATUSES: &str = "refStatuses";
  constant KEY_SUPPORTED_REFS (line 16) | const KEY_SUPPORTED_REFS: &str = "supportedRefs";
  function evaluate_condition (line 18) | pub(super) fn evaluate_condition(
  function add_cel_variables (line 71) | fn add_cel_variables(
  function value_or_empty_string (line 85) | fn value_or_empty_string(value: Option<String>) -> Value {
  function value_or_zero (line 89) | fn value_or_zero(value: Option<i64>) -> Value {

FILE: src/error.rs
  type FlakeCheckerError (line 2) | pub enum FlakeCheckerError {

FILE: src/flake.rs
  constant MAX_DAYS (line 11) | pub const MAX_DAYS: i64 = 30;
  type FlakeCheckConfig (line 13) | pub(crate) struct FlakeCheckConfig {
  method default (line 22) | fn default() -> Self {
  function nixpkgs_deps (line 33) | pub(super) fn nixpkgs_deps(
  function check_flake_lock (line 79) | pub(crate) fn check_flake_lock(
  function num_days_old (line 139) | pub(super) fn num_days_old(timestamp: i64) -> i64 {
  function cel_conditions (line 158) | fn cel_conditions() {
  function clean_flake_locks (line 204) | fn clean_flake_locks() {
  function dirty_flake_locks (line 225) | fn dirty_flake_locks() {
  function explicit_nixpkgs_keys (line 280) | fn explicit_nixpkgs_keys() {
  function missing_nixpkgs_keys (line 309) | fn missing_nixpkgs_keys() {

FILE: src/issue.rs
  type Issue (line 4) | pub(crate) struct Issue {
  type IssueKind (line 11) | pub(crate) enum IssueKind {
    method is_disallowed (line 34) | pub(crate) fn is_disallowed(&self) -> bool {
    method is_outdated (line 38) | pub(crate) fn is_outdated(&self) -> bool {
    method is_non_upstream (line 42) | pub(crate) fn is_non_upstream(&self) -> bool {
    method is_violation (line 46) | pub(crate) fn is_violation(&self) -> bool {
  type Disallowed (line 19) | pub(crate) struct Disallowed {
  type Outdated (line 24) | pub(crate) struct Outdated {
  type NonUpstream (line 29) | pub(crate) struct NonUpstream {

FILE: src/main.rs
  type Cli (line 27) | struct Cli {
  function supported_refs (line 100) | pub(crate) fn supported_refs(ref_statuses: BTreeMap<String, String>) -> ...
  function main (line 117) | async fn main() -> Result<ExitCode, FlakeCheckerError> {
  type Cli (line 246) | struct Cli {
  function main (line 257) | fn main() -> Result<ExitCode, FlakeCheckerError> {

FILE: src/ref_statuses.rs
  constant ALLOWED_REFS_URL (line 7) | const ALLOWED_REFS_URL: &str = "https://prometheus.nixos.org/api/v1/quer...
  type Response (line 10) | struct Response {
  type Data (line 15) | struct Data {
  type DataResult (line 20) | struct DataResult {
  type Metric (line 25) | struct Metric {
  function check_ref_statuses (line 30) | pub(crate) fn check_ref_statuses(
  function fetch_ref_statuses (line 36) | pub(crate) fn fetch_ref_statuses() -> Result<BTreeMap<String, String>, F...

FILE: src/summary.rs
  type Summary (line 33) | pub(crate) struct Summary {
    method new (line 42) | pub(crate) fn new(
    method console_log_errors (line 106) | pub fn console_log_errors(&self) -> Result<(), FlakeCheckerError> {
    method generate_markdown (line 172) | pub fn generate_markdown(&self) -> Result<(), FlakeCheckerError> {
    method generate_text (line 196) | pub fn generate_text(&self) -> Result<(), FlakeCheckerError> {
Condensed preview — 32 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (113K chars).
[
  {
    "path": ".cargo/config.toml",
    "chars": 112,
    "preview": "[target.'cfg(target_os = \"linux\")']\nrustflags = [\n  \"--cfg\", \"tokio_unstable\",\n  \"-Crelocation-model=static\",\n]\n"
  },
  {
    "path": ".editorconfig",
    "chars": 236,
    "preview": "# https://editorconfig.org\nroot = true\n\n[*]\nindent_style = space\nindent_size = 2\nend_of_line = lf\ncharset = utf-8\ntrim_t"
  },
  {
    "path": ".envrc",
    "chars": 10,
    "preview": "use flake\n"
  },
  {
    "path": ".github/workflows/build.yaml",
    "chars": 1893,
    "preview": "name: Build flake-checker artifacts\n\non:\n  workflow_call:\n  workflow_dispatch:\n\njobs:\n  build-artifacts:\n    runs-on: ${"
  },
  {
    "path": ".github/workflows/ci.yaml",
    "chars": 4376,
    "preview": "name: Flake checker CI\n\non:\n  pull_request:\n  push:\n    branches: [main]\n\njobs:\n  checks:\n    name: Nix and Rust checks\n"
  },
  {
    "path": ".github/workflows/flakehub-publish-tagged.yaml",
    "chars": 799,
    "preview": "name: \"Publish tags to FlakeHub\"\n\non:\n  push:\n    tags:\n      - \"v?[0-9]+.[0-9]+.[0-9]+*\"\n  workflow_dispatch:\n    input"
  },
  {
    "path": ".github/workflows/ref-statuses.yaml",
    "chars": 1404,
    "preview": "name: Check that ref statuses are up to date\n\non:\n  schedule:\n    - cron: \"0 0 * * *\" # Daily\n\njobs:\n  check-ref-statuse"
  },
  {
    "path": ".github/workflows/release-branches.yaml",
    "chars": 1705,
    "preview": "name: Release Branch\n\non:\n  push:\n    branches:\n      # NOTE: make sure any branches here are also valid directory names"
  },
  {
    "path": ".github/workflows/release-prs.yaml",
    "chars": 2209,
    "preview": "name: Release PR\n\non:\n  pull_request:\n    types:\n      - opened\n      - reopened\n      - synchronize\n      - labeled\n\njo"
  },
  {
    "path": ".github/workflows/release-tags.yaml",
    "chars": 2040,
    "preview": "name: Release Tags\n\non:\n  push:\n    tags:\n      - \"v*.*.*\"\n\njobs:\n  build:\n    uses: ./.github/workflows/build.yaml\n\n  r"
  },
  {
    "path": ".github/workflows/update-flake-lock.yaml",
    "chars": 638,
    "preview": "name: update-flake-lock\n\non:\n  workflow_dispatch: # enable manual triggering\n  schedule:\n    - cron: \"0 0 */15 * *\" # ev"
  },
  {
    "path": ".gitignore",
    "chars": 160,
    "preview": "# Rust artifacts\n/target\n\n# Nix artifacts\nresult\n\n# Generated\nsummary.md\n!src/templates/summary.md\nsrc/policy.json\n\n# Re"
  },
  {
    "path": "Cargo.toml",
    "chars": 1224,
    "preview": "[package]\nname = \"flake-checker\"\nversion = \"0.2.11\"\nedition = \"2024\"\n\n[workspace]\nresolver = \"2\"\nmembers = [\".\", \"parse-"
  },
  {
    "path": "LICENSE",
    "chars": 11358,
    "preview": "\n                                 Apache License\n                           Version 2.0, January 2004\n                  "
  },
  {
    "path": "README.md",
    "chars": 7583,
    "preview": "# Nix Flake Checker\n\n[![FlakeHub](https://img.shields.io/endpoint?url=https://flakehub.com/f/DeterminateSystems/flake-ch"
  },
  {
    "path": "flake.nix",
    "chars": 6618,
    "preview": "{\n  inputs = {\n    nixpkgs.url = \"https://flakehub.com/f/NixOS/nixpkgs/0.1\";\n\n    fenix = {\n      url = \"https://flakehu"
  },
  {
    "path": "parse-flake-lock/Cargo.toml",
    "chars": 183,
    "preview": "[package]\nname = \"parse-flake-lock\"\nversion = \"0.1.1\"\nedition = \"2021\"\n\n[dependencies]\nserde = { workspace = true }\nserd"
  },
  {
    "path": "parse-flake-lock/src/lib.rs",
    "chars": 14565,
    "preview": "#![allow(dead_code)]\n\n//! A library for parsing Nix [`flake.lock`][lock] files\n//! into a structured Rust representation"
  },
  {
    "path": "ref-statuses.json",
    "chars": 314,
    "preview": "{\n  \"nixos-25.05\": \"unmaintained\",\n  \"nixos-25.05-small\": \"unmaintained\",\n  \"nixos-25.11\": \"stable\",\n  \"nixos-25.11-smal"
  },
  {
    "path": "src/condition.rs",
    "chars": 2842,
    "preview": "use cel_interpreter::{Context, Program, Value};\nuse parse_flake_lock::{FlakeLock, Node};\n\nuse std::collections::{BTreeMa"
  },
  {
    "path": "src/error.rs",
    "chars": 1040,
    "preview": "#[derive(Debug, thiserror::Error)]\npub enum FlakeCheckerError {\n    #[error(\"CEL execution error: {0}\")]\n    CelExecutio"
  },
  {
    "path": "src/flake.rs",
    "chars": 11902,
    "preview": "#![allow(dead_code)]\n\nuse std::collections::BTreeMap;\n\nuse crate::FlakeCheckerError;\nuse crate::issue::{Disallowed, Issu"
  },
  {
    "path": "src/issue.rs",
    "chars": 1088,
    "preview": "use serde::Serialize;\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\npub(crate) struct Issue {\n    pub input: String,\n  "
  },
  {
    "path": "src/main.rs",
    "chars": 8746,
    "preview": "mod condition;\nmod error;\nmod flake;\nmod issue;\nmod summary;\n\n#[cfg(feature = \"ref-statuses\")]\nmod ref_statuses;\n\nuse st"
  },
  {
    "path": "src/ref_statuses.rs",
    "chars": 1070,
    "preview": "use crate::error::FlakeCheckerError;\n\nuse serde::Deserialize;\n\nuse std::collections::BTreeMap;\n\nconst ALLOWED_REFS_URL: "
  },
  {
    "path": "src/summary.rs",
    "chars": 7251,
    "preview": "use crate::FlakeCheckConfig;\nuse crate::error::FlakeCheckerError;\nuse crate::flake::MAX_DAYS;\nuse crate::issue::{Issue, "
  },
  {
    "path": "src/templates/summary.cel.md.hbs",
    "chars": 786,
    "preview": "# ![](https://avatars.githubusercontent.com/u/80991770?s=30) Flake checkup\n\n{{#if clean}}\nThe Determinate Flake Checker "
  },
  {
    "path": "src/templates/summary.cel.txt.hbs",
    "chars": 468,
    "preview": "Flake checker results:\n\n{{#if clean}}\nThe flake checker scanned your flake.lock and didn't identify any issues. You spec"
  },
  {
    "path": "src/templates/summary.standard.md.hbs",
    "chars": 4157,
    "preview": "# ![](https://avatars.githubusercontent.com/u/80991770?s=30) Flake checkup\n\n{{#if clean}}\nThe Determinate Flake Checker "
  },
  {
    "path": "src/templates/summary.standard.txt.hbs",
    "chars": 2617,
    "preview": "Flake checker results:\n\n{{#if clean}}\nThe flake checker scanned your flake.lock and didn't identify any issues. All\nNixp"
  },
  {
    "path": "templates/README.md.handlebars",
    "chars": 7435,
    "preview": "# Nix Flake Checker\n\n[![FlakeHub](https://img.shields.io/endpoint?url=https://flakehub.com/f/DeterminateSystems/flake-ch"
  },
  {
    "path": "tests/cel-condition.cel",
    "chars": 242,
    "preview": "['nixos-unstable', 'nixos-unstable-small', 'nixpkgs-unstable'].map(rev, supportedRefs.contains(rev))\n    && owner == 'Ni"
  }
]

About this extraction

This page contains the full source code of the DeterminateSystems/flake-checker GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 32 files (104.6 KB), approximately 25.9k tokens, and a symbol index with 67 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repository-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!