[
  {
    "path": ".gitignore",
    "content": "/result*\n"
  },
  {
    "path": ".travis.yml",
    "content": "language: nix\naddons:\n  ssh_known_hosts: floki.garbas.si\nenv:\n- STDENV=clang\n- STDENV=clang36\n- STDENV=clang37\n- STDENV=clang38\n- STDENV=gcc\n- STDENV=gcc49\n- STDENV=gcc48\nscript:\n- if [ \"$TRAVIS_EVENT_TYPE\" == \"cron\" ]; then\n    nix-shell update.nix --pure;\n  fi\n- if [ \"$TRAVIS_PULL_REQUEST\" != \"true\" -a \"$TRAVIS_BRANCH\" = \"master\" ]; then\n    nix-build release.nix -A gecko.\"x86_64-linux\".\"$STDENV\";\n    mkdir nars/;\n    nix-push --dest \"$PWD/nars/\" --force ./result;\n  fi\nbefore_install:\n- openssl aes-256-cbc -K $encrypted_be02022e0814_key -iv $encrypted_be02022e0814_iv -in deploy_rsa.enc -out deploy_rsa -d\nbefore_deploy:\n- eval \"$(ssh-agent -s)\"\n- chmod 600 $TRAVIS_BUILD_DIR/deploy_rsa\n- ssh-add $TRAVIS_BUILD_DIR/deploy_rsa\ndeploy:\n  provider: script\n  skip_cleanup: true\n  script: rsync -avh --ignore-existing $TRAVIS_BUILD_DIR/nars/ travis@floki.garbas.si:/var/travis/nixpkgs-mozilla/\n  on:\n    branch: master\n"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "# Community Participation Guidelines\n\nThis repository is governed by Mozilla's code of conduct and etiquette guidelines. \nFor more details, please read the\n[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). \n\n## How to Report\nFor more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page.\n\n<!--\n## Project Specific Etiquette\n\nIn some cases, there will be additional project etiquette i.e.: (https://bugzilla.mozilla.org/page.cgi?id=etiquette.html).\nPlease update for your project.\n-->\n"
  },
  {
    "path": "LICENSE",
    "content": "Copyright 2017 Mozilla\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "README.rst",
    "content": "nixpkgs-mozilla\n===============\n\nGathering nix efforts in one repository.\n\n\nCurrent packages\n----------------\n\n- gecko (https://github.com/mozilla/gecko-dev)\n- firefox-bin variants including Nightly\n\nfirefox-bin variants\n--------------------\n\nNixpkgs already has definitions for `firefox\n<https://github.com/NixOS/nixpkgs/blob/246d2848ff657d56fcf2d8596709e8869ce8616a/pkgs/applications/networking/browsers/firefox/packages.nix>`_,\nwhich is built from source, as well as `firefox-bin\n<https://github.com/NixOS/nixpkgs/blob/ba2fe3c9a626a8fb845c786383b8b23ad8355951/pkgs/applications/networking/browsers/firefox-bin/default.nix>`_,\nwhich is the binary Firefox version built by Mozilla.\n\nThe ``firefox-overlay.nix`` in this repository adds definitions for\nsome other firefox-bin variants that Mozilla ships:\n``firefox-nightly-bin``, ``firefox-beta-bin``, and\n``firefox-esr-bin``. All are exposed under a ``latest`` attribute,\ne.g. ``latest.firefox-nightly-bin``.\n\nUnfortunately, these variants do not auto-update, and you may see some\nannoying pop-ups complaining about this.\n\nNote that all the ``-bin`` packages are \"unfree\" (because of the\nFirefox trademark, held by Mozilla), so you will need to set\n``nixpkgs.config.allowUnfree`` in order to use them. More info `here\n<https://wiki.nixos.org/wiki/FAQ#How_can_I_install_a_package_from_unstable_while_remaining_on_the_stable_channel?>`_.\n\nRust overlay\n------------\n\n**NOTE:** Nix overlays only works on up-to-date versions of NixOS/nixpkgs, starting from 17.03.\n\nA nixpkgs overlay is provided to contain all of the latest rust releases.\n\nTo use the rust overlay run the ``./rust-overlay-install.sh`` command. It will\nlink the current ``./rust-overlay.nix`` into your ``~/.config/nixpkgs/overlays`` folder.\n\nOnce this is done, use ``nix-env -iA nixpkgs.latest.rustChannels.nightly.rust`` for\nexample. 
Replace the ``nixpkgs.`` prefix with ``nixos.`` on NixOS.\n\nUsing in nix expressions\n------------------------\n\nExample of using in ```shell.nix```:\n\n.. code:: nix\n\n let\n   moz_overlay = import (builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz);\n   nixpkgs = import <nixpkgs> { overlays = [ moz_overlay ]; };\n in\n   with nixpkgs;\n   stdenv.mkDerivation {\n     name = \"moz_overlay_shell\";\n     buildInputs = [\n       # to use the latest nightly:\n       nixpkgs.latest.rustChannels.nightly.rust\n       # to use a specific nighly:\n       (nixpkgs.rustChannelOf { date = \"2018-04-11\"; channel = \"nightly\"; }).rust\n       # to use the project's rust-toolchain file:\n       (nixpkgs.rustChannelOf { rustToolchain = ./rust-toolchain; }).rust\n     ];\n   }\n\nFlake usage\n-----------\nThis repository contains a minimal flake interface for the various\noverlays in this repository. To use it in your own flake, add it as\nan input to your ``flake.nix``:\n\n.. code:: nix\n\n {\n   inputs.nixpkgs.url = github:NixOS/nixpkgs;\n   inputs.nixpkgs-mozilla.url = github:mozilla/nixpkgs-mozilla;\n\n   outputs = { self, nixpkgs, nixpkgs-mozilla }: {\n     devShell.\"x86_64-linux\" = let\n       pkgs = import nixpkgs { system = \"x86_64-linux\"; overlays = [ nixpkgs-mozilla.overlay ]; };\n     in pkgs.mkShell {\n       buildInputs = [ pkgs.latest.rustChannels.nightly.rust ];\n     };\n   };\n  }\nThe available overlays are ``nixpkgs-mozilla.overlay`` for the\ndefault overlay containing everything, and\n``nixpkgs-mozilla.overlays.{lib, rust, rr, firefox, git-cinnabar}``\nrespectively. Depending on your use case, you might need to set the\n``--impure`` flag when invoking the ``nix`` command. 
This is because\nthis repository fetches resources from non-pinned URLs\nnon-reproducibly.\n\nUsing Custom Version of Firefox\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nTo use with a custom version of Firefox, you would have multiple choices to\nprovide either provide the full file information, such as the url and the\nchecksum of the file.\n\n.. code:: nix\n\n {\n   inputs.nixpkgs.url = nixpkgs/nixos-unstable;\n   inputs.nixpkgs-mozilla.url = github:mozilla/nixpkgs-mozilla;\n\n   outputs = { self, nixpkgs, nixpkgs-mozilla }: {\n     devShell.\"x86_64-linux\" = let\n       pkgs = import nixpkgs { system = \"x86_64-linux\"; overlays = [ nixpkgs-mozilla.overlay ]; };\n       firefox-nightly150 = pkgs.lib.firefoxOverlay.firefoxVersion {\n         info = {\n           url = \"https://download.cdn.mozilla.net/pub/firefox/nightly/2026/03/2026-03-05-00-23-19-mozilla-central/firefox-150.0a1.en-US.linux-x86_64.tar.xz\";\n           sha512 = \"8faa93d786d618963a7e5f5bf16488523aac2faeb9a27051b1002a44f56a31c93af72db16a00af9ec97786666ff0498a7fc9e95480e967ff1aa55346e57fcef3\";\n           verifiedByHand = true;\n         };\n       };\n     in pkgs.mkShell { buildInputs = [ firefox-nightly150 ];};\n   };\n  }\n\nIn this example, the checksum is taken out of the checksums file, which is in\nthe same directory as the compressed package.\n\nIn this particular case, as this is a nightly version, we could have set the\nrelease attribute to `false`, with the version and the timestamp of the\ndirectory:\n\n.. code:: nix\n\n       firefox-nightly150 = pkgs.lib.firefoxOverlay.firefoxVersion {\n         release = false;\n         version = \"150.0a1\";\n         timestamp = \"2026-03-05-00-23-19\";\n       };\n\nAnd we could have omitted the timestamp if we only cared about the latest\nnightly version.\n\nTo grab a beta or release version, we only have to specify the version number\nand setting the release attribute to `true`:\n\n.. 
code:: nix\n\n       firefox-nightly149 = pkgs.lib.firefoxOverlay.firefoxVersion {\n         release = true;\n         version = \"149.0b1\";\n       };\n\nFirefox Development Environment\n-------------------------------\n\nThis repository provides several tools to facilitate development on\nFirefox. Firefox is built on an engine called Gecko, which lends its\nname to some of the files and derivations in this repo.\n\nChecking out Firefox\n~~~~~~~~~~~~~~~~~~~~\n\nTo build Firefox from source, it is best to have a local checkout of\n``mozilla-central``. ``mozilla-central`` is hosted in Mercurial, but\nsome people prefer to access it using ``git`` and\n``git-cinnabar``. The tools in this repo support either using\nmercurial or git.\n\nThis repository provides a ``git-cinnabar-overlay.nix`` which defines\na ``git-cinnabar`` derivation. This overlay can be used to install\n``git-cinnabar``, either using ``nix-env`` or as part of a system-wide\n``configuration.nix``.\n\nBuilding Firefox\n~~~~~~~~~~~~~~~~\n\nThe ``firefox-overlay.nix`` provides an environment to build Firefox\nfrom its sources, once you have finished the checkout of\n``mozilla-central``. You can use ``nix-shell`` to enter this\nenvironment to launch ``mach`` commands to build Firefox and test your\nbuild.\n\nSome debugging tools are available in this environment as well, but\nother development tools (such as those used to submit changes for\nreview) are outside the scope of this environment.\n\nThe ``nix-shell`` environment is available in the\n``gecko.<arch>.<cc>`` attribute of the ``release.nix`` file provided\nin this repository.\n\nThe ``<arch>`` attribute is either ``x86_64-linux`` or ``i686-linux``. The first\none would create a native toolchain for compiling on x64, while the second one\nwould give a native toolchain for compiling on x86. 
Note that due to the size of\nthe compilation units on x86, the compilation might not be able to complete, but\nsome sub part of Gecko, such as SpiderMonkey would compile fine.\n\nThe ``<cc>`` attribute is either ``gcc`` or ``clang``, or any specific version\nof the compiler available in the ``compiler-overlay.nix`` file which is repeated\nin ``release.nix``. This compiler would only be used for compiling Gecko, and\nthe rest of the toolchain is compiled against the default ``stdenv`` of the\narchitecture.\n\nWhen first entering the ``nix-shell``, the toolchain will pull and build all\nthe dependencies necessary to build Gecko, this includes might take some time.\nThis work will not be necessary the second time, unless you use a different\ntoolchain or architecture.\n\n.. code:: sh\n\n  ~/$ cd mozilla-central\n  ~/mozilla-central$ nix-shell ../nixpkgs-mozilla/release.nix -A gecko.x86_64-linux.gcc --pure\n    ... pull the rust compiler\n    ... compile the toolchain\n  # First time only - initialize virtualenv\n  [~/mozilla-central] python ./mach create-mach-environment\n     ... create .mozbuild/_virtualenvs/mach\n  [~/mozilla-central] python ./mach build\n    ... build firefox desktop\n  [~/mozilla-central] python ./mach run\n    ... run firefox\n\nWhen entering the ``nix-shell``, the ``MOZCONFIG`` environment variable is set\nto a local file, named ``.mozconfig.nix-shell``, created each time you enter the\n``nix-shell``. You can create your own ``.mozconfig`` file which extends the\ndefault one, with your own options.\n\n.. code:: sh\n\n  ~/mozilla-central$ nix-shell ../nixpkgs-mozilla/release.nix -A gecko.x86_64-linux.gcc --pure\n  [~/mozilla-central] cat .mozconfig\n  # Import current nix-shell config.\n  . 
.mozconfig.nix-shell\n\n  ac_add_options --enable-js-shell\n  ac_add_options --disable-tests\n  [~/mozilla-central] export MOZCONFIG=\"$(pwd)/.mozconfig\"\n  [~/mozilla-central] python ./mach build\n\nTo avoid repeating yourself, you can also rely on the ``NIX_SHELL_HOOK``\nenvironment variable, to reset the ``MOZCONFIG`` environment variable for you.\n\n.. code:: sh\n\n  ~/mozilla-central$ export NIX_SHELL_HOOK=\"export MOZCONFIG=$(pwd)/.mozconfig;\"\n  ~/mozilla-central$ nix-shell ../nixpkgs-mozilla/release.nix -A gecko.x86_64-linux.gcc --pure\n  [~/mozilla-central] python ./mach build\n\nSubmitting Firefox patches\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nFirefox development happens in `Mozilla Phabricator\n<https://phabricator.services.mozilla.com/>`_. Mozilla Phabricator\ndocs are `here\n<https://moz-conduit.readthedocs.io/en/latest/phabricator-user.html>`_.\n\nTo get your commits into Phabricator, some options include:\n\n- Arcanist, the upstream tool for interacting with\n  Phabricator. Arcanist is packaged in nixpkgs already; you can find\n  it in `nixos.arcanist`. Unfortunately, as of this writing, upstream\n  Arcanist does not support ``git-cinnabar`` (according to `the\n  \"Setting up Arcanist\"\n  <https://moz-conduit.readthedocs.io/en/latest/phabricator-user.html#setting-up-arcanist>`_\n  documentation). `Mozilla maintains a fork of Arcanist\n  <https://github.com/mozilla-conduit/arcanist>`_ but it isn't yet\n  packaged. (PRs welcome.)\n\n- `moz-phab <https://github.com/mozilla-conduit/review>`_, an in-house\n  CLI for Phabricator. It's available in nix packages (unstable channel).\n\n- `phlay <https://github.com/mystor/phlay>`_, a small Python script\n  that speaks to the Phabricator API directly. 
This repository ships a\n  ``phlay-overlay.nix`` that you can use to make ``phlay`` available\n  in a nix-shell or nix-env.\n\nNote: although the ``nix-shell`` from the previous section may have\nall the tools you would normally use to do Firefox development, it\nisn't recommended that you use that shell for anything besides tasks\nthat involve running ``mach``. Other development tasks such as\ncommitting code and submitting patches to code review are best handled\nin a separate nix-shell.\n\nTODO\n----\n\n- setup hydra to have binary channels\n\n- make sure pinned revisions get updated automatically (if build passes we\n  should update revisions in default.nix)\n\n- pin to specific (working) nixpkgs revision (as we do for other sources)\n\n- can we make this work on darwin as well?\n\n- assign maintainers for our packages that will monitor that it \"always\" builds\n\n- hook it with vulnix report to monitor CVEs (once vulnix is ready, it must be\n  ready soon :P)\n"
  },
  {
    "path": "compilers-overlay.nix",
    "content": "# This overlays add a customStdenv attribute which provide an stdenv with\n# different versions of the compilers. This can be used to test Gecko builds\n# against different compiler settings, or different compiler versions.\n#\n# See release.nix \"builder\" function, to understand how these different stdenv\n# are used.\nself: super: \n\nlet\n  noSysDirs = (super.stdenv.system != \"x86_64-darwin\"\n             && super.stdenv.system != \"x86_64-freebsd\"\n             && super.stdenv.system != \"i686-freebsd\"\n             && super.stdenv.system != \"x86_64-kfreebsd-gnu\");\n  crossSystem = null;\n\n  gcc473 = super.wrapCC (super.callPackage ./pkgs/gcc-4.7 (with self; {\n    inherit noSysDirs;\n    texinfo = texinfo4;\n    # I'm not sure if profiling with enableParallelBuilding helps a lot.\n    # We can enable it back some day. This makes the *gcc* builds faster now.\n    profiledCompiler = false;\n\n    # When building `gcc.crossDrv' (a \"Canadian cross\", with host == target\n    # and host != build), `cross' must be null but the cross-libc must still\n    # be passed.\n    cross = null;\n    libcCross = if crossSystem != null then libcCross else null;\n    libpthreadCross =\n      if crossSystem != null && crossSystem.config == \"i586-pc-gnu\"\n      then gnu.libpthreadCross\n      else null;\n  }));\n\n  # By default wrapCC keep the same header files, but NixOS is using the\n  # latest header files from GCC, which are not supported by clang, because\n  # clang implement a different set of locking primitives than GCC.  This\n  # expression is used to wrap clang with a matching verion of the libc++.\n  maybeWrapClang = cc: cc;\n  /*\n    if cc ? 
clang\n    then clangWrapCC cc\n    else cc;\n    */\n\n  clangWrapCC = llvmPackages:\n    let libcxx =\n      super.lib.overrideDerivation llvmPackages.libcxx (drv: {\n        # https://bugzilla.mozilla.org/show_bug.cgi?id=1277619\n        # https://llvm.org/bugs/show_bug.cgi?id=14435\n        patches = drv.patches ++ [ ./pkgs/clang/bug-14435.patch ];\n      });\n    in\n    super.callPackage <nixpkgs/pkgs/build-support/cc-wrapper> {\n      cc = llvmPackages.clang-unwrapped or llvmPackages.clang;\n      isClang = true;\n      stdenv = self.clangStdenv;\n      libc = self.glibc;\n      # cc-wrapper pulls gcc headers, which are not compatible with features\n      # implemented in clang.  These packages are used to override that.\n      extraPackages = [ self.libcxx llvmPackages.libcxxabi ];\n      nativeTools = false;\n      nativeLibc = false;\n    };\n\n  buildWithCompiler = cc:\n    super.stdenvAdapters.overrideCC self.stdenv (maybeWrapClang cc);\n\n  chgCompilerSource = cc: name: src:\n    cc.override (conf:\n      if conf ? 
gcc then # Nixpkgs 14.12\n        { gcc = super.lib.overrideDerivation conf.gcc (old: { inherit name src; }); }\n      else # Nixpkgs 15.05\n        { cc = super.lib.overrideDerivation conf.cc (old: { inherit name src; }); }\n    );\n\n  compilersByName = with self; {\n    clang = llvmPackages.clang;\n    clang36 = llvmPackages_36.clang;\n    clang37 = llvmPackages_37.clang;\n    clang38 = llvmPackages_38.clang; # not working yet.\n    clang5 = llvmPackages_5.clang or llvmPackages.clang;\n    clang6 = llvmPackages_6.clang or llvmPackages.clang;\n    clang7 = llvmPackages_7.clang or llvmPackages.clang;\n    clang12 = llvmPackages_12.clang or llvmPackages.clang;\n    clang13 = llvmPackages_13.clang or llvmPackages.clang;\n    gcc = gcc;\n    gcc6 = gcc6;\n    gcc5 = gcc5;\n    gcc49 = gcc49;\n    gcc48 = gcc48;\n    gcc474 = chgCompilerSource gcc473 \"gcc-4.7.4\" (fetchurl {\n      url = \"mirror://gnu/gcc/gcc-4.7.4/gcc-4.7.4.tar.bz2\";\n      sha256 = \"10k2k71kxgay283ylbbhhs51cl55zn2q38vj5pk4k950qdnirrlj\";\n    });\n    gcc473 = gcc473;\n    # Version used on Linux slaves, except Linux x64 ASAN.\n    gcc472 = chgCompilerSource gcc473 \"gcc-4.7.2\" (fetchurl {\n      url = \"mirror://gnu/gcc/gcc-4.7.2/gcc-4.7.2.tar.bz2\";\n      sha256 = \"115h03hil99ljig8lkrq4qk426awmzh0g99wrrggxf8g07bq74la\";\n    });\n  };\n\nin {\n  customStdenvs =\n    super.lib.mapAttrs (name: value: buildWithCompiler value) compilersByName;\n}\n"
  },
  {
    "path": "default.nix",
    "content": "# Nixpkgs overlay which aggregates overlays for tools and products, used and\n# published by Mozilla.\nself: super:\n\nwith super.lib;\n\n(foldl' (flip extends) (_: super)\n  (map import (import ./overlays.nix)))\n  self\n"
  },
  {
    "path": "firefox-overlay.nix",
    "content": "# This file provide the latest binary versions of Firefox published by Mozilla.\nself: super:\n\nlet\n  # This URL needs to be updated about every 2 years when the subkey is rotated.\n  pgpKey = super.fetchurl {\n    url = \"https://download.cdn.mozilla.net/pub/firefox/candidates/138.0b1-candidates/build1/KEY\";\n    hash = \"sha256-FOGtyDxtZpW6AbNdSj0QoK1AYkQYxHPypT8zJr2XYQk=\";\n  };\n\n  # This file is currently maintained manually, if this Nix expression attempt\n  # to download the wrong version, this is likely to be the problem.\n  #\n  # Open a pull request against https://github.com/mozilla-releng/shipit to\n  # update the version, as done in\n  # https://github.com/mozilla-releng/shipit/pull/1467\n  firefox_versions = with builtins;\n    fromJSON (readFile (fetchurl \"https://product-details.mozilla.org/1.0/firefox_versions.json\"));\n\n  arch = if self.stdenv.system == \"i686-linux\"\n    then \"linux-i686\"\n    else \"linux-x86_64\";\n\n  yearOf = with super.lib; yyyymmddhhmmss:\n    head (splitString \"-\" yyyymmddhhmmss);\n  monthOf = with super.lib; yyyymmddhhmmss:\n    head (tail (splitString \"-\" yyyymmddhhmmss));\n\n  # Given SHA512SUMS file contents and file name, extract matching sha512sum.\n  extractSha512Sum = sha512sums: file:\n    with builtins;\n    # Nix 1.x do not have `builtins.split`.\n    # Nix 2.0 have an bug in `builtins.match` (see https://github.com/NixOS/nix/issues/2147).\n    # So I made separate logic for Nix 1.x and Nix 2.0.\n    if builtins ? split then\n      substring 0 128 (head\n        (super.lib.filter\n          (s: isString s && substring 128 (stringLength s) s == \"  ${file}\")\n          (split \"\\n\" sha512sums)))\n    else\n      head (match \".*[\\n]([0-9a-f]*)  ${file}.*\" sha512sums);\n\n  # The timestamp argument is a yyyy-mm-dd-hh-mm-ss date, which corresponds to\n  # one specific version. This is used mostly for bisecting.\n  versionInfo = { name, version, release, system ? arch, timestamp ? 
null, info ? null, ... }: with builtins;\n    if (info != null) then info else\n    if release then\n      # For versions such as Beta & Release:\n      # https://download.cdn.mozilla.net/pub/firefox/releases/55.0b3/SHA256SUMS\n      let\n        dir = \"https://download.cdn.mozilla.net/pub/firefox/releases/${version}\";\n        # After version 134 firefox switched to using tar.xz instead of tar.bz2\n        majorVersion = super.lib.strings.toInt (\n          builtins.elemAt (super.lib.strings.splitString \".\" version) 0\n        );\n        extension = if majorVersion > 134 then \"tar.xz\" else \"tar.bz2\";\n        file = \"${system}/en-US/firefox-${version}.${extension}\";\n        sha512Of = chksum: file: extractSha512Sum (readFile (fetchurl chksum)) file;\n      in rec {\n        chksum = \"${dir}/SHA512SUMS\";\n        chksumSig = \"${chksum}.asc\";\n        chksumSha256 = hashFile \"sha256\" (fetchurl \"${dir}/SHA512SUMS\");\n        chksumSigSha256 = hashFile \"sha256\" (fetchurl \"${chksum}.asc\");\n        inherit file;\n        url = \"${dir}/${file}\";\n        sha512 = sha512Of chksum file;\n        sig = null;\n        sigSha512 = null;\n      }\n    else\n      # For Nightly versions:\n      # https://download.cdn.mozilla.net/pub/firefox/nightly/latest-mozilla-central/firefox-56.0a1.en-US.linux-x86_64.checksums\n      let\n        dir =\n          if timestamp == null then\n            let\n              buildhubJSON = with builtins;\n                fromJSON (readFile (fetchurl \"https://download.cdn.mozilla.net/pub/firefox/nightly/latest-mozilla-central/firefox-${version}.en-US.${system}.buildhub.json\"));\n            in builtins.replaceStrings [ \"/${file}\" ] [ \"\" ] buildhubJSON.download.url\n          else \"https://download.cdn.mozilla.net/pub/firefox/nightly/${yearOf timestamp}/${monthOf timestamp}/${timestamp}-mozilla-central\" ;\n        file = \"firefox-${version}.en-US.${system}.tar.xz\";\n        sha512Of = chksum: file: head (match 
\".*[\\n]([0-9a-f]*) sha512 [0-9]* ${file}[\\n].*\" (readFile (fetchurl chksum)));\n      in rec {\n        chksum = \"${dir}/firefox-${version}.en-US.${system}.checksums\";\n        chksumSig = null;\n        # file content:\n        # <hash> sha512 62733881 firefox-56.0a1.en-US.linux-x86_64.tar.bz2\n        # <hash> sha256 62733881 firefox-56.0a1.en-US.linux-x86_64.tar.bz2\n        url = \"${dir}/${file}\";\n        sha512 = sha512Of chksum file;\n        sig = \"${dir}/${file}.asc\";\n        sigSha512 = sha512Of chksum \"${file}.asc\";\n      };\n\n  # From the version info, check the authenticity of the check sum file, such\n  # that we guarantee that we have\n  verifyFileAuthenticity = { file, sha512, chksum, chksumSig }:\n    assert extractSha512Sum (builtins.readFile chksum) file == sha512;\n    super.runCommand \"check-firefox-signature\" {\n      buildInputs = [ self.gnupg ];\n      FILE = chksum;\n      ASC = chksumSig;\n    } ''\n      set -eu\n      gpg --dearmor < ${pgpKey} > keyring.gpg\n      gpgv --keyring=./keyring.gpg $ASC $FILE\n      mkdir $out\n    '';\n\n  # From the version info, create a fetchurl derivation which will get the\n  # sources from the remote.\n  fetchVersion = info:\n    if info.verifiedByHand or false then\n      # Set info.verifiedByHand = true; when testing with tarball.\n      super.fetchurl {\n        inherit (info) url sha512;\n      }\n    else if info.chksumSig != null then\n      super.fetchurl {\n        inherit (info) url sha512;\n\n        # This is a fixed derivation, but we still add as a dependency the\n        # verification of the checksum.  
Thus, this fetch script can only be\n        # executed once the verifyAuthenticity script finished successfully.\n        postFetch = ''\n          : # Authenticity Check (${verifyFileAuthenticity {\n            inherit (info) file sha512;\n            chksum = builtins.fetchurl { url = info.chksum; sha256 = info.chksumSha256; };\n            chksumSig = builtins.fetchurl { url = info.chksumSig; sha256 = info.chksumSigSha256; };\n          }})\n        '';\n      }\n    else\n      super.fetchurl {\n        inherit (info) url sha512;\n\n        # This would download the tarball, and then verify that the content\n        # match the signature file. Fortunately, any failure of this code would\n        # prevent the output from being reused.\n        postFetch =\n          let asc = super.fetchurl { url = info.sig; sha512 = info.sigSha512; }; in ''\n          : # Authenticity Check\n          set -eu\n          export PATH=\"$PATH:${self.gnupg}/bin/\"\n          gpg --dearmor < ${pgpKey} > keyring.gpg\n          gpgv --keyring=./keyring.gpg ${asc} $out\n        '';\n      };\n\n  versionWithDefaults = version:\n    { name = \"Firefox Twilight\";\n      version = \"0.0a1\";\n      channel = \"twilight\";\n      wmClass = \"firefox-twilight\";\n      release = false;\n      # info attribute set is either null, in which case it is infered by\n      # versionInfo, or it should be an attribute set with either:\n      #\n      #   1. Manual verification of packages:\n      #     url = \"...\";\n      #     sha512 = \"...\";\n      #     verifiedByHand = true;\n      #\n      #   2. Using a checksum file, which is itself verified using the gpg key.\n      #     url = \"...\";\n      #     file = \"...\";\n      #     sha512 = \"...\";\n      #     chksum = \"...\";\n      #     chksumSha256 = \"...\";\n      #     chksumSig = \"...\";\n      #     chksumSigSha256 = \"...\";\n      #\n      #   3. 
Using the gpg key on the archive\n      #     url = \"...\";\n      #     sha512 = \"...\";\n      #     sig = \"...\";\n      #     sigSha512 = \"...\";\n    } // version;\n\n  firefoxVersion = version':\n    let\n      version = versionWithDefaults version';\n      info = versionInfo version;\n      pkg = ((self.firefox-bin-unwrapped.override ({\n        generated = {\n          version = version.version;\n          sources = { inherit (info) url sha512; };\n        };\n      } // super.lib.optionalAttrs (self.firefox-bin-unwrapped.passthru ? applicationName) {\n        applicationName = version.name;\n      })).overrideAttrs (old: {\n        # Add a dependency on the signature check.\n        src = fetchVersion info;\n      }));\n      in super.wrapFirefox pkg ({\n        pname = \"${pkg.binaryName}-bin\";\n        wmClass = version.wmClass;\n      } // super.lib.optionalAttrs (!self.firefox-bin-unwrapped.passthru ? applicationName) {\n        desktopName = version.name;\n      });\n\n  firefoxVariants = {\n    firefox-nightly-bin = {\n      name = \"Firefox Nightly\";\n      channel = \"nightly\";\n      wmClass = \"firefox-nightly\";\n      version = firefox_versions.FIREFOX_NIGHTLY;\n      release = false;\n    };\n    firefox-beta-bin = {\n      name = \"Firefox Beta\";\n      channel = \"beta\";\n      wmClass = \"firefox-beta\";\n      version = firefox_versions.LATEST_FIREFOX_DEVEL_VERSION;\n      release = true;\n    };\n    firefox-bin = {\n      name = \"Firefox\";\n      channel = \"release\";\n      wmClass = \"firefox\";\n      version = firefox_versions.LATEST_FIREFOX_VERSION;\n      release = true;\n    };\n    firefox-esr-bin = {\n      name = \"Firefox ESR\";\n      channel = \"release\";\n      wmClass = \"firefox\";\n      version = firefox_versions.FIREFOX_ESR;\n      release = true;\n    };\n  };\nin\n\n{\n  lib = super.lib // {\n    firefoxOverlay = {\n      inherit pgpKey firefoxVersion versionInfo firefox_versions firefoxVariants;\n    
};\n  };\n\n  # Set of packages which are automagically updated. Do not rely on these for\n  # reproducible builds.\n  latest = (super.latest or {}) // (builtins.mapAttrs (n: v: firefoxVersion v) firefoxVariants);\n\n  # Set of packages which are used to build the developer environment\n  devEnv = (super.shell or {}) // {\n    gecko = super.callPackage ./pkgs/gecko {\n      inherit (self.python38Packages) setuptools;\n      pythonFull = self.python38Full;\n      nodejs =\n        if builtins.compareVersions self.nodejs.name \"nodejs-8.11.3\" < 0\n        then self.nodejs-8_x else self.nodejs;\n\n      rust-cbindgen =\n        if !(self ? \"rust-cbindgen\") then self.rust-cbindgen-latest\n        else if builtins.compareVersions self.rust-cbindgen.version self.rust-cbindgen-latest.version < 0\n        then self.rust-cbindgen-latest else self.rust-cbindgen;\n\n      # Due to std::ascii::AsciiExt changes in 1.23, Gecko does not compile, so\n      # use the latest Rust version before 1.23.\n      # rust = (super.rustChannelOf { channel = \"stable\"; date = \"2017-11-22\"; }).rust;\n      # rust = (super.rustChannelOf { channel = \"stable\"; date = \"2020-03-12\"; }).rust;\n      inherit (self.latest.rustChannels.stable) rust;\n    };\n  };\n\n  # Use rust-cbindgen imported from Nixpkgs (September 2018) unless the current\n  # version of Nixpkgs already packages a version of rust-cbindgen.\n  rust-cbindgen-latest = super.callPackage ./pkgs/cbindgen {\n    rustPlatform = super.makeRustPlatform {\n      cargo = self.latest.rustChannels.stable.rust;\n      rustc = self.latest.rustChannels.stable.rust;\n    };\n  };\n\n  jsdoc = super.callPackage ./pkgs/jsdoc {};\n}\n"
  },
  {
    "path": "flake.nix",
    "content": "{\n  description = \"Mozilla overlay for Nixpkgs\";\n\n  outputs = { self, ... }: {\n    # Default overlay.\n    overlay = import ./default.nix;\n\n    # Individual overlays.\n    overlays = {\n      lib = import ./lib-overlay.nix;\n      rust = import ./rust-overlay.nix;\n      firefox = import ./firefox-overlay.nix;\n      git-cinnabar = import ./git-cinnabar-overlay.nix;\n    };\n  };\n}\n"
  },
  {
    "path": "git-cinnabar-overlay.nix",
    "content": "self: super:\n\n{\n  git-cinnabar = super.callPackage ./pkgs/git-cinnabar {\n    # we need urllib to recognize ssh.\n    # python = self.pythonFull;\n    python = self.mercurial.python;\n  };\n}\n"
  },
  {
    "path": "lib/parseTOML.nix",
    "content": "with builtins;\n\n# Tokenizer.\nlet\n  layout_pat = \"[ \\n]+\";\n  layout_pat_opt = \"[ \\n]*\";\n  token_pat = ''=|[[][[][a-zA-Z0-9_.\"*-]+[]][]]|[[][a-zA-Z0-9_.\"*-]+[]]|[[][^]]+[]]|[a-zA-Z0-9_-]+|\"[^\"]*\"''; #\"\n\n  tokenizer_1_11 = str:\n    let\n      tokenizer_rec = len: prevTokens: patterns: str:\n        let\n          pattern = head patterns;\n          layoutAndTokens = match pattern str;\n          matchLength = stringLength (head layoutAndTokens);\n          tokens = prevTokens ++ tail layoutAndTokens;\n        in\n          if layoutAndTokens == null then\n            # if we cannot reduce the pattern, return the list of token\n            if tail patterns == [] then prevTokens\n            # otherwise, take the next pattern, which only captures half the token.\n            else tokenizer_rec len prevTokens (tail patterns) str\n          else tokenizer_rec len tokens patterns (substring matchLength len str);\n\n      avgTokenSize = 100;\n      ceilLog2 = v:\n        let inner = n: i: if i < v then inner (n + 1) (i * 2) else n; in\n        inner 1 1;\n\n      # The builtins.match function match the entire string, and generate a list of all captured\n      # elements. This is the most efficient way to make a tokenizer, if we can make a pattern which\n      # capture all token of the file. Unfortunately C++ std::regex does not support captures in\n      # repeated patterns. 
As a work-around, we generate patterns which are matching tokens in multiple\n      # of 2, such that we can avoid iterating too many times over the content.\n      generatePatterns = str:\n        let\n          depth = ceilLog2 (stringLength str / avgTokenSize);\n          inner = depth:\n            if depth == 0 then [ \"(${token_pat})\" ]\n            else\n              let next = inner (depth - 1); in\n              [ \"${head next}${layout_pat}${head next}\" ] ++ next;\n        in\n          map (pat: \"(${layout_pat_opt}${pat}).*\" ) (inner depth);\n\n    in\n      tokenizer_rec (stringLength str) [] (generatePatterns str) str;\n\n  tokenizer_1_12 = str:\n    let\n      # Nix 1.12 has the builtins.split function which allow to tokenize the\n      # file quickly. by iterating with a simple regexp.\n      layoutTokenList = split \"(${token_pat})\" str;\n      isLayout = s: match layout_pat_opt s != null;\n      filterLayout = list:\n        filter (s:\n          if isString s then\n            if isLayout s then false\n            else throw \"Error: Unexpected token: '${s}'\"\n          else true) list;\n      removeTokenWrapper = list:\n        map (x: assert tail x == []; head x) list;\n    in\n      removeTokenWrapper (filterLayout layoutTokenList);\n\n  tokenizer =\n    if builtins ? 
split\n    then tokenizer_1_12\n    else tokenizer_1_11;\nin\n\n# Parse entry headers\nlet\n  unescapeString = str:\n    # Let's ignore any escape character for the moment.\n    assert match ''\"[^\"]*\"'' str != null; #\"\n    substring 1 (stringLength str - 2) str;\n\n  # Match the content of TOML format section names.\n  ident_pat = ''[a-zA-Z0-9_-]+|\"[^\"]*\"''; #\"\n\n  removeBraces = token: wrapLen:\n    substring wrapLen (stringLength token - 2 * wrapLen) token;\n\n  # Note, this implementation is limited to 11 identifiers.\n  matchPathFun_1_11 = token:\n    let\n      # match header_pat \"a.b.c\" == [ \"a\" \".b\" \"b\" \".c\" \"c\" ]\n      header_pat =\n        foldl' (pat: n: \"(${ident_pat})([.]${pat})?\")\n           \"(${ident_pat})\" (genList (n: 0) 10);\n      matchPath = match header_pat token;\n      filterDot = filter (s: substring 0 1 s != \".\") matchPath;\n    in\n      filterDot;\n\n  matchPathFun_1_12 = token:\n    map (e: head e)\n      (filter (s: isList s)\n        (split \"(${ident_pat})\" token));\n\n  matchPathFun =\n    if builtins ? 
split\n    then matchPathFun_1_12\n    else matchPathFun_1_11;\n\n  headerToPath = token: wrapLen:\n    let\n      token' = removeBraces token wrapLen;\n      matchPath = matchPathFun token';\n      path =\n        map (s:\n          if substring 0 1 s != ''\"'' then s #\"\n          else unescapeString s\n        ) matchPath;\n    in\n      assert matchPath != null;\n      # assert trace \"Path: ${token'}; match as ${toString path}\" true;\n      path;\nin\n\n# Reconstruct the equivalent attribute set.\nlet\n  tokenToValue = token:\n    if token == \"true\" then true\n    else if token == \"false\" then false\n    # TODO: convert the TOML list into a Nix list.\n    else if match \"[[][^]]+[]]\" token != null then token\n    else unescapeString token;\n\n  parserInitState = {\n    idx = 0;\n    path = [];\n    isList = false;\n    output = [];\n    elem = {};\n  };\n\n  # Imported from nixpkgs library.\n  setAttrByPath = attrPath: value:\n    if attrPath == [] then value\n    else listToAttrs\n      [ { name = head attrPath; value = setAttrByPath (tail attrPath) value; } ];\n\n  closeSection = state:\n    state // {\n      output = state.output ++ [ (setAttrByPath state.path (\n        if state.isList then [ state.elem ]\n        else state.elem\n      )) ];\n    };\n\n  readToken = state: token:\n    # assert trace \"Read '${token}'\" true;\n    if state.idx == 0 then\n      if substring 0 2 token == \"[[\" then\n        (closeSection state) // {\n          path = headerToPath token 2;\n          isList = true;\n          elem = {};\n        }\n      else if substring 0 1 token == \"[\" then\n        (closeSection state) // {\n          path = headerToPath token 1;\n          isList = false;\n          elem = {};\n        }\n      else\n        assert match \"[a-zA-Z0-9_-]+\" token != null;\n        state // { idx = 1; name = token; }\n    else if state.idx == 1 then\n      assert token == \"=\";\n      state // { idx = 2; }\n    else\n      assert state.idx == 
2;\n      state // {\n        idx = 0;\n        elem = state.elem // {\n          \"${state.name}\" = tokenToValue token;\n        };\n      };\n\n  # aggregate each section as individual attribute sets.\n  parser = str:\n    closeSection (foldl' readToken parserInitState (tokenizer str));\n\n  fromTOML = toml:\n    let\n      sections = (parser toml).output;\n      # Inlined from nixpkgs library functions.\n      zipAttrs = sets:\n        listToAttrs (map (n: {\n          name = n;\n          value =\n            let v = catAttrs n sets; in\n            # assert trace \"Visiting ${n}\" true;\n            if tail v == [] then head v\n            else if isList (head v) then concatLists v\n            else if isAttrs (head v) then zipAttrs v\n            else throw \"cannot merge sections\";\n        }) (concatLists (map attrNames sets)));\n    in\n      zipAttrs sections;\nin\n\n{\n  testing = fromTOML (builtins.readFile ./channel-rust-nightly.toml);\n  testing_url = fromTOML (builtins.readFile (builtins.fetchurl\n  \"https://static.rust-lang.org/dist/channel-rust-nightly.toml\"));\n  inherit fromTOML;\n}\n"
  },
  {
    "path": "lib-overlay.nix",
    "content": "self: super:\n\n{\n  lib = super.lib // (import ./pkgs/lib/default.nix { pkgs = self; });\n}\n"
  },
  {
    "path": "overlays.nix",
    "content": "[\n  ./lib-overlay.nix\n  ./rust-overlay.nix\n  ./firefox-overlay.nix\n  ./git-cinnabar-overlay.nix\n]\n"
  },
  {
    "path": "package-set.nix",
    "content": "{ pkgs }:\n\nwith pkgs.lib;\nlet\n  self = foldl'\n    (prev: overlay: prev // (overlay (pkgs // self) (pkgs // prev)))\n    {} (map import (import ./overlays.nix));\nin self\n"
  },
  {
    "path": "phlay-overlay.nix",
    "content": "self: super:\n\n{\n  phlay = super.callPackage ./pkgs/phlay {};\n}\n"
  },
  {
    "path": "pinned.nix",
    "content": "# This script extends nixpkgs with mozilla packages.\n#\n# First it imports the <nixpkgs> in the environment and depends on it\n# providing fetchFromGitHub and lib.importJSON.\n#\n# After that it loads a pinned release of nixos-unstable and uses that as the\n# base for the rest of packaging. One can pass it's own pkgsPath attribute if\n# desired, probably in the context of hydra.\n\n{ pkgsPath ? null\n, overlays ? []\n, system ? null\n, geckoSrc ? null\n}:\n\n# Pin a specific version of Nixpkgs.\nlet\n  _pkgs = import <nixpkgs> {};\n  _pkgsPath =\n    if pkgsPath != null then pkgsPath\n    else _pkgs.fetchFromGitHub (_pkgs.lib.importJSON ./pkgs/nixpkgs.json);\n  nixpkgs = import _pkgsPath ({\n    overlays = import ./default.nix ++ overlays;\n  } // (if system != null then { inherit system; } else {}));\nin\n  nixpkgs // {\n    # Do not add a name attribute attribute in an overlay !!! As this will cause\n    # tons of recompilations.\n    name = \"nixpkgs\";\n    updateScript = nixpkgs.lib.updateFromGitHub {\n      owner = \"NixOS\";\n      repo = \"nixpkgs-channels\";\n      branch = \"nixos-unstable-small\";\n      path = \"pkgs/nixpkgs.json\";\n    };\n  }\n"
  },
  {
    "path": "pkgs/cbindgen/default.nix",
    "content": "### NOTE: This file is a copy of the one from Nixpkgs repository\n### (taken 2020 February) from commit 82d9ce45fe0b67e3708ab6ba47ffcb4bba09945d.\n### It is used when the version of cbindgen in\n### upstream nixpkgs is not up-to-date enough to compile Firefox.\n\n{ stdenv, lib, fetchFromGitHub, rustPlatform\n# , Security\n}:\n\nrustPlatform.buildRustPackage rec {\n  name = \"rust-cbindgen-${version}\";\n  version = \"0.14.3\";\n\n  src = fetchFromGitHub {\n    owner = \"eqrion\";\n    repo = \"cbindgen\";\n    rev = \"v${version}\";\n    sha256 = \"0pw55334i10k75qkig8bgcnlsy613zw2p5j4xyz8v71s4vh1a58j\";\n  };\n\n  cargoSha256 = \"0088ijnjhqfvdb1wxy9jc7hq8c0yxgj5brlg68n9vws1mz9rilpy\";\n\n  # buildInputs = lib.optional stdenv.isDarwin Security;\n\n  checkFlags = [\n    # https://github.com/eqrion/cbindgen/issues/338\n    \"--skip test_expand\"\n  ];\n  # https://github.com/NixOS/nixpkgs/issues/61618\n  postConfigure = ''\n    mkdir .cargo\n    touch .cargo/.package-cache\n    export HOME=`pwd`\n  '';\n\n  meta = with lib; {\n    description = \"A project for generating C bindings from Rust code\";\n    homepage = \"https://github.com/eqrion/cbindgen\";\n    license = licenses.mpl20;\n    maintainers = with maintainers; [ jtojnar andir ];\n  };\n}\n"
  },
  {
    "path": "pkgs/clang/bug-14435.patch",
    "content": "diff -x _inst -x _build -x .svn -ur libcxx.old/include/cstdio libcxx.new/include/cstdio\n--- libcxx.old/include/cstdio   2016-07-08 12:47:12.964181871 +0000\n+++ libcxx.new/include/cstdio   2016-07-08 12:47:27.540149147 +0000\n@@ -109,15 +109,15 @@\n #endif\n \n #ifdef getc\n-inline _LIBCPP_INLINE_VISIBILITY int __libcpp_getc(FILE* __stream) {return getc(__stream);}\n+inline __attribute__ ((__always_inline__)) int __libcpp_getc(FILE* __stream) {return getc(__stream);}\n #undef getc\n-inline _LIBCPP_INLINE_VISIBILITY int getc(FILE* __stream) {return __libcpp_getc(__stream);}\n+inline __attribute__ ((__always_inline__)) int getc(FILE* __stream) {return __libcpp_getc(__stream);}\n #endif  // getc\n \n #ifdef putc\n-inline _LIBCPP_INLINE_VISIBILITY int __libcpp_putc(int __c, FILE* __stream) {return putc(__c, __stream);}\n+inline __attribute__ ((__always_inline__)) int __libcpp_putc(int __c, FILE* __stream) {return putc(__c, __stream);}\n #undef putc\n-inline _LIBCPP_INLINE_VISIBILITY int putc(int __c, FILE* __stream) {return __libcpp_putc(__c, __stream);}\n+inline __attribute__ ((__always_inline__)) int putc(int __c, FILE* __stream) {return __libcpp_putc(__c, __stream);}\n #endif  // putc\n \n #ifdef clearerr\ndiff -x _inst -x _build -x .svn -ur libcxx.old/include/utility libcxx.new/include/utility\n--- libcxx.old/include/utility  2016-07-08 12:46:02.570334913 +0000\n+++ libcxx.new/include/utility  2016-07-08 12:51:00.760636878 +0000\n@@ -217,7 +217,7 @@\n }\n \n template<class _Tp, size_t _Np>\n-inline _LIBCPP_INLINE_VISIBILITY\n+inline __attribute__ ((__always_inline__))\n void\n swap(_Tp (&__a)[_Np], _Tp (&__b)[_Np]) _NOEXCEPT_(__is_nothrow_swappable<_Tp>::value)\n {\n"
  },
  {
    "path": "pkgs/firefox-nightly-bin/update.nix",
    "content": "{ name\n, writeScript\n, xidel\n, coreutils\n, gnused\n, gnugrep\n, curl\n, jq\n}:\n\nlet\n  version = (builtins.parseDrvName name).version;\nin writeScript \"update-firefox-nightly-bin\" ''\n  PATH=${coreutils}/bin:${gnused}/bin:${gnugrep}/bin:${xidel}/bin:${curl}/bin:${jq}/bin\n\n  #set -eux\n  pushd pkgs/firefox-nightly-bin\n\n  tmpfile=`mktemp`\n  url=https://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central/\n\n  nightly_file=`curl $url | \\\n                xidel - --extract //a | \\\n                grep firefox | \\\n                grep linux-x86_64.json | \\\n                tail -1 | \\\n                sed -e 's/.json//'`\n  nightly_json=`curl --silent $url$nightly_file.json`\n\n  cat > $tmpfile <<EOF\n  {\n    version = `echo $nightly_json | jq .\"moz_app_version\"` + \"-\" + `echo $nightly_json | jq .\"buildid\"`;\n    sources = [\n      { url = \"$url$nightly_file.tar.xz\";\n        locale = \"`echo $nightly_file | cut -d\".\" -f3`\";\n        arch = \"`echo $nightly_file | cut -d\".\" -f4`\";\n        sha512 = \"`curl --silent $url$nightly_file.checksums | grep $nightly_file.tar.xz$ | grep sha512 | cut -d\" \" -f1`\";\n      }\n    ];\n  }\n  EOF\n\n  mv $tmpfile sources.nix\n\n  popd\n\n''\n"
  },
  {
    "path": "pkgs/gcc-4.7/arm-eabi.patch",
    "content": "Index: gcc-4_7-branch/libstdc++-v3/configure.host\n===================================================================\n--- gcc-4_7-branch/libstdc++-v3/configure.host\t(revision 194579)\n+++ gcc-4_7-branch/libstdc++-v3/configure.host\t(revision 194580)\n@@ -340,7 +340,7 @@\n         fi\n     esac\n     case \"${host}\" in\n-      arm*-*-linux-*eabi)\n+      arm*-*-linux-*eabi*)\n \tport_specific_symbol_files=\"\\$(srcdir)/../config/os/gnu-linux/arm-eabi-extra.ver\"\n \t;;\n     esac\nIndex: gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_signed/requirements/typedefs-2.cc\n===================================================================\n--- gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_signed/requirements/typedefs-2.cc\t(revision 194579)\n+++ gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_signed/requirements/typedefs-2.cc\t(revision 194580)\n@@ -1,5 +1,5 @@\n // { dg-options \"-std=gnu++0x -funsigned-char -fshort-enums\" }\n-// { dg-options \"-std=gnu++0x -funsigned-char -fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi } }\n+// { dg-options \"-std=gnu++0x -funsigned-char -fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi* } }\n \n // 2007-05-03  Benjamin Kosnik  <bkoz@redhat.com>\n //\nIndex: gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_unsigned/requirements/typedefs-2.cc\n===================================================================\n--- gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_unsigned/requirements/typedefs-2.cc\t(revision 194579)\n+++ gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_unsigned/requirements/typedefs-2.cc\t(revision 194580)\n@@ -1,5 +1,5 @@\n // { dg-options \"-std=gnu++0x -funsigned-char -fshort-enums\" }\n-// { dg-options \"-std=gnu++0x -funsigned-char -fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi } }\n+// { dg-options \"-std=gnu++0x -funsigned-char -fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi* } }\n \n // 
2007-05-03  Benjamin Kosnik  <bkoz@redhat.com>\n //\nIndex: gcc-4_7-branch/libjava/configure.ac\n===================================================================\n--- gcc-4_7-branch/libjava/configure.ac\t(revision 194579)\n+++ gcc-4_7-branch/libjava/configure.ac\t(revision 194580)\n@@ -931,7 +931,7 @@\n     # on Darwin -single_module speeds up loading of the dynamic libraries.\n     extra_ldflags_libjava=-Wl,-single_module\n     ;;\n-arm*linux*eabi)\n+arm*-*-linux*eabi*)\n     # Some of the ARM unwinder code is actually in libstdc++.  We\n     # could in principle replicate it in libgcj, but it's better to\n     # have a dependency on libstdc++.\nIndex: gcc-4_7-branch/libjava/configure\n===================================================================\n--- gcc-4_7-branch/libjava/configure\t(revision 194579)\n+++ gcc-4_7-branch/libjava/configure\t(revision 194580)\n@@ -20542,7 +20542,7 @@\n     # on Darwin -single_module speeds up loading of the dynamic libraries.\n     extra_ldflags_libjava=-Wl,-single_module\n     ;;\n-arm*linux*eabi)\n+arm*-*-linux*eabi*)\n     # Some of the ARM unwinder code is actually in libstdc++.  
We\n     # could in principle replicate it in libgcj, but it's better to\n     # have a dependency on libstdc++.\nIndex: gcc-4_7-branch/libgcc/config.host\n===================================================================\n--- gcc-4_7-branch/libgcc/config.host\t(revision 194579)\n+++ gcc-4_7-branch/libgcc/config.host\t(revision 194580)\n@@ -327,7 +327,7 @@\n arm*-*-linux*)\t\t\t# ARM GNU/Linux with ELF\n \ttmake_file=\"${tmake_file} arm/t-arm t-fixedpoint-gnu-prefix\"\n \tcase ${host} in\n-\tarm*-*-linux-*eabi)\n+\tarm*-*-linux-*eabi*)\n \t  tmake_file=\"${tmake_file} arm/t-elf arm/t-bpabi arm/t-linux-eabi t-slibgcc-libgcc\"\n \t  tm_file=\"$tm_file arm/bpabi-lib.h\"\n \t  unwind_header=config/arm/unwind-arm.h\nIndex: gcc-4_7-branch/gcc/doc/install.texi\n===================================================================\n--- gcc-4_7-branch/gcc/doc/install.texi\t(revision 194579)\n+++ gcc-4_7-branch/gcc/doc/install.texi\t(revision 194580)\n@@ -3222,7 +3222,7 @@\n @heading @anchor{arm-x-eabi}arm-*-eabi\n ARM-family processors.  Subtargets that use the ELF object format\n require GNU binutils 2.13 or newer.  
Such subtargets include:\n-@code{arm-*-netbsdelf}, @code{arm-*-*linux-gnueabi}\n+@code{arm-*-netbsdelf}, @code{arm-*-*linux-gnueabi*}\n and @code{arm-*-rtemseabi}.\n \n @html\nIndex: gcc-4_7-branch/gcc/testsuite/gcc.target/arm/synchronize.c\n===================================================================\n--- gcc-4_7-branch/gcc/testsuite/gcc.target/arm/synchronize.c\t(revision 194579)\n+++ gcc-4_7-branch/gcc/testsuite/gcc.target/arm/synchronize.c\t(revision 194580)\n@@ -1,4 +1,4 @@\n-/* { dg-final { scan-assembler \"__sync_synchronize|dmb|mcr\" { target arm*-*-linux-*eabi } } } */\n+/* { dg-final { scan-assembler \"__sync_synchronize|dmb|mcr\" { target arm*-*-linux-*eabi* } } } */\n \n void *foo (void)\n {\nIndex: gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.jason/enum6.C\n===================================================================\n--- gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.jason/enum6.C\t(revision 194579)\n+++ gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.jason/enum6.C\t(revision 194580)\n@@ -7,10 +7,10 @@\n // enum-size attributes should only be emitted if there are values of\n // enum type that can escape the compilation unit, gcc cannot currently\n // detect this; if this facility is added then this linker option should\n-// not be needed.  arm-*-linux*eabi should be a good approximation to\n+// not be needed.  
arm-*-linux*eabi* should be a good approximation to\n // those platforms where the EABI supplement defines enum values to be\n // 32 bits wide.\n-// { dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi } }\n+// { dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi* } }\n \n #include <limits.h>\n \nIndex: gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.other/enum4.C\n===================================================================\n--- gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.other/enum4.C\t(revision 194579)\n+++ gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.other/enum4.C\t(revision 194580)\n@@ -9,10 +9,10 @@\n // enum-size attributes should only be emitted if there are values of\n // enum type that can escape the compilation unit, gcc cannot currently\n // detect this; if this facility is added then this linker option should\n-// not be needed.  arm-*-linux*eabi should be a good approximation to\n+// not be needed.  arm-*-linux*eabi* should be a good approximation to\n // those platforms where the EABI supplement defines enum values to be\n // 32 bits wide.\n-// { dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi } }\n+// { dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi* } }\n \n enum E { \n   a = -312\nIndex: gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.law/enum9.C\n===================================================================\n--- gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.law/enum9.C\t(revision 194579)\n+++ gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.law/enum9.C\t(revision 194580)\n@@ -7,10 +7,10 @@\n // enum-size attributes should only be emitted if there are values of\n // enum type that can escape the compilation unit, gcc cannot currently\n // detect this; if this facility is added then this linker option should\n-// not be needed.  arm-*-linux*eabi should be a good approximation to\n+// not be needed.  
arm-*-linux*eabi* should be a good approximation to\n // those platforms where the EABI supplement defines enum values to be\n // 32 bits wide.\n-// { dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi } }\n+// { dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi* } }\n \n // GROUPS passed enums\n   extern \"C\" int printf (const char *, ...);\nIndex: gcc-4_7-branch/gcc/testsuite/lib/target-supports.exp\n===================================================================\n--- gcc-4_7-branch/gcc/testsuite/lib/target-supports.exp\t(revision 194579)\n+++ gcc-4_7-branch/gcc/testsuite/lib/target-supports.exp\t(revision 194580)\n@@ -3818,7 +3818,7 @@\n \t\t}\n \t    } \"\"\n \t}]\n-    } elseif { [istarget arm*-*-linux-gnueabi] } {\n+    } elseif { [istarget arm*-*-linux-gnueabi*] } {\n \treturn [check_runtime sync_longlong_runtime {\n \t    #include <stdlib.h>\n \t    int main ()\n@@ -3860,7 +3860,7 @@\n \t     || [istarget i?86-*-*]\n \t     || [istarget x86_64-*-*]\n \t     || [istarget alpha*-*-*] \n-\t     || [istarget arm*-*-linux-gnueabi] \n+\t     || [istarget arm*-*-linux-gnueabi*] \n \t     || [istarget bfin*-*linux*]\n \t     || [istarget hppa*-*linux*]\n \t     || [istarget s390*-*-*] \n@@ -3890,7 +3890,7 @@\n \t     || [istarget i?86-*-*]\n \t     || [istarget x86_64-*-*]\n \t     || [istarget alpha*-*-*] \n-\t     || [istarget arm*-*-linux-gnueabi] \n+\t     || [istarget arm*-*-linux-gnueabi*] \n \t     || [istarget hppa*-*linux*]\n \t     || [istarget s390*-*-*] \n \t     || [istarget powerpc*-*-*]\nIndex: gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_9.f90\n===================================================================\n--- gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_9.f90\t(revision 194579)\n+++ gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_9.f90\t(revision 194580)\n@@ -1,6 +1,6 @@\n ! { dg-do run }\n ! { dg-options \"-fshort-enums\" }\n-! 
{ dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi } }\n+! { dg-options \"-fshort-enums -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi* } }\n ! Program to test enumerations when option -fshort-enums is given\n \n program main\nIndex: gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_10.f90\n===================================================================\n--- gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_10.f90\t(revision 194579)\n+++ gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_10.f90\t(revision 194580)\n@@ -1,7 +1,7 @@\n ! { dg-do run }\n ! { dg-additional-sources enum_10.c }\n ! { dg-options \"-fshort-enums -w\" }\n-! { dg-options \"-fshort-enums -w -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi } }\n+! { dg-options \"-fshort-enums -w -Wl,--no-enum-size-warning\" { target arm*-*-linux*eabi* } }\n ! Make sure short enums are indeed interoperable with the\n ! corresponding C type.\n \nIndex: gcc-4_7-branch/gcc/ada/gcc-interface/Makefile.in\n===================================================================\n--- gcc-4_7-branch/gcc/ada/gcc-interface/Makefile.in\t(revision 194579)\n+++ gcc-4_7-branch/gcc/ada/gcc-interface/Makefile.in\t(revision 194580)\n@@ -1866,7 +1866,7 @@\n   LIBRARY_VERSION := $(LIB_VERSION)\n endif\n \n-ifeq ($(strip $(filter-out arm% linux-gnueabi,$(arch) $(osys)-$(word 4,$(targ)))),)\n+ifeq ($(strip $(filter-out arm%-linux,$(arch)-$(osys)) $(if $(findstring eabi,$(word 4,$(targ))),,$(word 4,$(targ)))),)\n   LIBGNAT_TARGET_PAIRS = \\\n   a-intnam.ads<a-intnam-linux.ads \\\n   s-inmaop.adb<s-inmaop-posix.adb \\\nIndex: gcc-4_7-branch/gcc/config.gcc\n===================================================================\n--- gcc-4_7-branch/gcc/config.gcc\t(revision 194579)\n+++ gcc-4_7-branch/gcc/config.gcc\t(revision 194580)\n@@ -855,7 +855,7 @@\n \tesac\n \ttmake_file=\"${tmake_file} arm/t-arm\"\n \tcase ${target} in\n-\tarm*-*-linux-*eabi)\n+\tarm*-*-linux-*eabi*)\n \t    tm_file=\"$tm_file 
arm/bpabi.h arm/linux-eabi.h\"\n \t    tmake_file=\"$tmake_file arm/t-arm-elf arm/t-bpabi arm/t-linux-eabi\"\n \t    # Define multilib configuration for arm-linux-androideabi.\n"
  },
  {
    "path": "pkgs/gcc-4.7/builder.sh",
    "content": "source $stdenv/setup\n\n\nexport NIX_FIXINC_DUMMY=$NIX_BUILD_TOP/dummy\nmkdir $NIX_FIXINC_DUMMY\n\n\nif test \"$staticCompiler\" = \"1\"; then\n    EXTRA_LDFLAGS=\"-static\"\nelse\n    EXTRA_LDFLAGS=\"\"\nfi\n\n# GCC interprets empty paths as \".\", which we don't want.\nif test -z \"$CPATH\"; then unset CPATH; fi\nif test -z \"$LIBRARY_PATH\"; then unset LIBRARY_PATH; fi\necho \"\\$CPATH is \\`$CPATH'\"\necho \"\\$LIBRARY_PATH is \\`$LIBRARY_PATH'\"\n\nif test \"$noSysDirs\" = \"1\"; then\n\n    if test -e $NIX_GCC/nix-support/orig-libc; then\n\n        # Figure out what extra flags to pass to the gcc compilers\n        # being generated to make sure that they use our glibc.\n        extraFlags=\"$(cat $NIX_GCC/nix-support/libc-cflags)\"\n        extraLDFlags=\"$(cat $NIX_GCC/nix-support/libc-ldflags) $(cat $NIX_GCC/nix-support/libc-ldflags-before)\"\n\n        # Use *real* header files, otherwise a limits.h is generated\n        # that does not include Glibc's limits.h (notably missing\n        # SSIZE_MAX, which breaks the build).\n        export NIX_FIXINC_DUMMY=$(cat $NIX_GCC/nix-support/orig-libc)/include\n\n        # The path to the Glibc binaries such as `crti.o'.\n        glibc_libdir=\"$(cat $NIX_GCC/nix-support/orig-libc)/lib\"\n        \n    else\n        # Hack: support impure environments.\n        extraFlags=\"-isystem /usr/include\"\n        extraLDFlags=\"-L/usr/lib64 -L/usr/lib\"\n        glibc_libdir=\"/usr/lib\"\n        export NIX_FIXINC_DUMMY=/usr/include\n    fi\n\n    extraFlags=\"-I$NIX_FIXINC_DUMMY $extraFlags\"\n    extraLDFlags=\"-L$glibc_libdir -rpath $glibc_libdir $extraLDFlags\"\n\n    # BOOT_CFLAGS defaults to `-g -O2'; since we override it below,\n    # make sure to explictly add them so that files compiled with the\n    # bootstrap compiler are optimized and (optionally) contain\n    # debugging information (info \"(gccinstall) Building\").\n    if test -n \"$dontStrip\"; then\n\textraFlags=\"-O2 -g $extraFlags\"\n  
  else\n\t# Don't pass `-g' at all; this saves space while building.\n\textraFlags=\"-O2 $extraFlags\"\n    fi\n\n    EXTRA_FLAGS=\"$extraFlags\"\n    for i in $extraLDFlags; do\n        EXTRA_LDFLAGS=\"$EXTRA_LDFLAGS -Wl,$i\"\n    done\n\n    if test -n \"$targetConfig\"; then\n        # Cross-compiling, we need gcc not to read ./specs in order to build\n        # the g++ compiler (after the specs for the cross-gcc are created).\n        # Having LIBRARY_PATH= makes gcc read the specs from ., and the build\n        # breaks. Having this variable comes from the default.nix code to bring\n        # gcj in.\n        unset LIBRARY_PATH\n        unset CPATH\n        if test -z \"$crossStageStatic\"; then\n            EXTRA_TARGET_CFLAGS=\"-B${libcCross}/lib -idirafter ${libcCross}/include\"\n            EXTRA_TARGET_LDFLAGS=\"-Wl,-L${libcCross}/lib -Wl,-rpath,${libcCross}/lib -Wl,-rpath-link,${libcCross}/lib\"\n        fi\n    else\n        if test -z \"$NIX_GCC_CROSS\"; then\n            EXTRA_TARGET_CFLAGS=\"$EXTRA_FLAGS\"\n            EXTRA_TARGET_CXXFLAGS=\"$EXTRA_FLAGS\"\n            EXTRA_TARGET_LDFLAGS=\"$EXTRA_LDFLAGS\"\n        else\n            # This the case of cross-building the gcc.\n            # We need special flags for the target, different than those of the build\n            # Assertion:\n            test -e $NIX_GCC_CROSS/nix-support/orig-libc\n\n            # Figure out what extra flags to pass to the gcc compilers\n            # being generated to make sure that they use our glibc.\n            extraFlags=\"$(cat $NIX_GCC_CROSS/nix-support/libc-cflags)\"\n            extraLDFlags=\"$(cat $NIX_GCC_CROSS/nix-support/libc-ldflags) $(cat $NIX_GCC_CROSS/nix-support/libc-ldflags-before)\"\n\n            # Use *real* header files, otherwise a limits.h is generated\n            # that does not include Glibc's limits.h (notably missing\n            # SSIZE_MAX, which breaks the build).\n            NIX_FIXINC_DUMMY_CROSS=$(cat 
$NIX_GCC_CROSS/nix-support/orig-libc)/include\n\n            # The path to the Glibc binaries such as `crti.o'.\n            glibc_dir=\"$(cat $NIX_GCC_CROSS/nix-support/orig-libc)\"\n            glibc_libdir=\"$glibc_dir/lib\"\n            configureFlags=\"$configureFlags --with-native-system-header-dir=$glibc_dir/include\"\n\n            extraFlags=\"-I$NIX_FIXINC_DUMMY_CROSS $extraFlags\"\n            extraLDFlags=\"-L$glibc_libdir -rpath $glibc_libdir $extraLDFlags\"\n\n            EXTRA_TARGET_CFLAGS=\"$extraFlags\"\n            for i in $extraLDFlags; do\n                EXTRA_TARGET_LDFLAGS=\"$EXTRA_TARGET_LDFLAGS -Wl,$i\"\n            done\n        fi\n    fi\n\n\n    # CFLAGS_FOR_TARGET are needed for the libstdc++ configure script to find\n    # the startfiles.\n    # FLAGS_FOR_TARGET are needed for the target libraries to receive the -Bxxx\n    # for the startfiles.\n    makeFlagsArray=( \\\n        \"${makeFlagsArray[@]}\" \\\n        NATIVE_SYSTEM_HEADER_DIR=\"$NIX_FIXINC_DUMMY\" \\\n        SYSTEM_HEADER_DIR=\"$NIX_FIXINC_DUMMY\" \\\n        CFLAGS_FOR_BUILD=\"$EXTRA_FLAGS $EXTRA_LDFLAGS\" \\\n        CXXFLAGS_FOR_BUILD=\"$EXTRA_FLAGS $EXTRA_LDFLAGS\" \\\n        CFLAGS_FOR_TARGET=\"$EXTRA_TARGET_CFLAGS $EXTRA_TARGET_LDFLAGS\" \\\n        CXXFLAGS_FOR_TARGET=\"$EXTRA_TARGET_CFLAGS $EXTRA_TARGET_LDFLAGS\" \\\n        FLAGS_FOR_TARGET=\"$EXTRA_TARGET_CFLAGS $EXTRA_TARGET_LDFLAGS\" \\\n        LDFLAGS_FOR_BUILD=\"$EXTRA_FLAGS $EXTRA_LDFLAGS\" \\\n        LDFLAGS_FOR_TARGET=\"$EXTRA_TARGET_LDFLAGS $EXTRA_TARGET_LDFLAGS\" \\\n        )\n\n    if test -z \"$targetConfig\"; then\n        makeFlagsArray=( \\\n            \"${makeFlagsArray[@]}\" \\\n            BOOT_CFLAGS=\"$EXTRA_FLAGS $EXTRA_LDFLAGS\" \\\n            BOOT_LDFLAGS=\"$EXTRA_TARGET_CFLAGS $EXTRA_TARGET_LDFLAGS\" \\\n            )\n    fi\n\n    if test -n \"$targetConfig\" -a \"$crossStageStatic\" == 1; then\n        # We don't want the gcc build to assume there will be a libc providing\n     
   # limits.h in this stagae\n        makeFlagsArray=( \\\n            \"${makeFlagsArray[@]}\" \\\n            LIMITS_H_TEST=false \\\n            )\n    else\n        makeFlagsArray=( \\\n            \"${makeFlagsArray[@]}\" \\\n            LIMITS_H_TEST=true \\\n            )\n    fi\nfi\n\nif test -n \"$targetConfig\"; then\n    # The host strip will destroy some important details of the objects\n    dontStrip=1\nfi\n\nprovidedPreConfigure=\"$preConfigure\";\npreConfigure() {\n    if test -n \"$newlibSrc\"; then\n        tar xvf \"$newlibSrc\" -C ..\n        ln -s ../newlib-*/newlib newlib\n        # Patch to get armvt5el working:\n        sed -i -e 's/ arm)/ arm*)/' newlib/configure.host\n    fi\n    # Bug - they packaged zlib\n    if test -d \"zlib\"; then\n        # This breaks the build without-headers, which should build only\n        # the target libgcc as target libraries.\n        # See 'configure:5370'\n        rm -Rf zlib\n    fi\n\n    if test -f \"$NIX_GCC/nix-support/orig-libc\"; then\n        # Patch the configure script so it finds glibc headers.  
It's\n        # important for example in order not to get libssp built,\n        # because its functionality is in glibc already.\n        glibc_headers=\"$(cat $NIX_GCC/nix-support/orig-libc)/include\"\n        sed -i \\\n            -e \"s,glibc_header_dir=/usr/include,glibc_header_dir=$glibc_headers\", \\\n            gcc/configure\n    fi\n\n    if test -n \"$crossMingw\" -a -n \"$crossStageStatic\"; then\n        mkdir -p ../mingw\n        # --with-build-sysroot expects that:\n        cp -R $libcCross/include ../mingw\n        configureFlags=\"$configureFlags --with-build-sysroot=`pwd`/..\"\n    fi\n\n    # Eval the preConfigure script from nix expression.\n    eval $providedPreConfigure;\n    env;\n    # Perform the build in a different directory.\n    mkdir ../build\n    cd ../build\n    configureScript=../$sourceRoot/configure\n}\n\n\npostConfigure() {\n    # Don't store the configure flags in the resulting executables.\n    sed -e '/TOPLEVEL_CONFIGURE_ARGUMENTS=/d' -i Makefile\n}\n\n\npostInstall() {\n    # Remove precompiled headers for now.  They are very big and\n    # probably not very useful yet.\n    find $out/include -name \"*.gch\" -exec rm -rf {} \\; -prune\n\n    # Remove `fixincl' to prevent a retained dependency on the\n    # previous gcc.\n    rm -rf $out/libexec/gcc/*/*/install-tools\n    rm -rf $out/lib/gcc/*/*/install-tools\n    \n    # More dependencies with the previous gcc or some libs (gccbug stores the build command line)\n    rm -rf $out/bin/gccbug\n    # Take out the bootstrap-tools from the rpath, as it's not needed at all having $out\n    for i in $out/libexec/gcc/*/*/*; do\n        if PREV_RPATH=`patchelf --print-rpath $i`; then\n            patchelf --set-rpath `echo $PREV_RPATH | sed 's,:[^:]*bootstrap-tools/lib,,'` $i\n        fi\n    done\n\n    # Get rid of some \"fixed\" header files\n    rm -rf $out/lib/gcc/*/*/include/root\n\n    # Replace hard links for i686-pc-linux-gnu-gcc etc. 
with symlinks.\n    for i in $out/bin/*-gcc*; do\n        if cmp -s $out/bin/gcc $i; then\n            ln -sfn gcc $i\n        fi\n    done\n\n    for i in $out/bin/c++ $out/bin/*-c++* $out/bin/*-g++*; do\n        if cmp -s $out/bin/g++ $i; then\n            ln -sfn g++ $i\n        fi\n    done\n\n    eval \"$postInstallGhdl\"\n}\n\ngenericBuild\n"
  },
  {
    "path": "pkgs/gcc-4.7/default.nix",
    "content": "{ stdenv, lib, fetchurl, noSysDirs\n, langC ? true, langCC ? true, langFortran ? false\n, langJava ? false\n, langAda ? false\n, langVhdl ? false\n, langGo ? false\n, profiledCompiler ? false\n, staticCompiler ? false\n, enableShared ? true\n, texinfo ? null\n, perl ? null # optional, for texi2pod (then pod2man); required for Java\n, gmp, mpfr, mpc, gettext, which\n, libelf                      # optional, for link-time optimizations (LTO)\n, ppl ? null, cloog ? null # optional, for the Graphite optimization framework.\n, zlib ? null, boehmgc ? null\n, zip ? null, unzip ? null, pkgconfig ? null, gtk ? null, libart_lgpl ? null\n, libX11 ? null, libXt ? null, libSM ? null, libICE ? null, libXtst ? null\n, libXrender ? null, xproto ? null, renderproto ? null, xextproto ? null\n, libXrandr ? null, libXi ? null, inputproto ? null, randrproto ? null\n, gnatboot ? null\n, enableMultilib ? false\n, enablePlugin ? true             # whether to support user-supplied plug-ins\n, name ? \"gcc\"\n, cross ? null\n, binutilsCross ? null\n, libcCross ? null\n, crossStageStatic ? true\n, gnat ? null\n, libpthread ? null, libpthreadCross ? null  # required for GNU/Hurd\n, stripped ? true\n, gnused ? 
null\n}:\n\nassert langJava     -> zip != null && unzip != null\n                       && zlib != null && boehmgc != null\n                       && perl != null;  # for `--enable-java-home'\nassert langAda      -> gnatboot != null;\nassert langVhdl     -> gnat != null;\n\n# LTO needs libelf and zlib.\nassert libelf != null -> zlib != null;\n\n# Make sure we get GNU sed.\nassert stdenv.isDarwin -> gnused != null;\n\n# The go frontend is written in c++\nassert langGo -> langCC;\n\nwith lib;\nwith builtins;\n\nlet version = \"4.7.3\";\n\n    # Whether building a cross-compiler for GNU/Hurd.\n    crossGNU = cross != null && cross.config == \"i586-pc-gnu\";\n\n  /* gccinstall.info says that \"parallel make is currently not supported since\n     collisions in profile collecting may occur\".\n\n     Parallel make of gfortran is disabled because of an apparent race\n     condition concerning the generation of \"bconfig.h\". Please try and\n     re-enable parallel make for a later release of gfortran to check whether\n     the error has been fixed.\n  */\n    enableParallelBuilding = !profiledCompiler && !langFortran;\n\n    patches = []\n      ++ optional enableParallelBuilding ./parallel-bconfig-4.7.patch\n      ++ optional stdenv.isArm [ ./arm-eabi.patch ]\n      ++ optional (cross != null) ./libstdc++-target.patch\n      # ++ optional noSysDirs ./no-sys-dirs.patch\n      # The GNAT Makefiles did not pay attention to CFLAGS_FOR_TARGET for its\n      # target libraries and tools.\n      ++ optional langAda ./gnat-cflags.patch\n      ++ optional langFortran ./gfortran-driving.patch;\n\n    javaEcj = fetchurl {\n      # The `$(top_srcdir)/ecj.jar' file is automatically picked up at\n      # `configure' time.\n\n      # XXX: Eventually we might want to take it from upstream.\n      url = \"ftp://sourceware.org/pub/java/ecj-4.3.jar\";\n      sha256 = \"0jz7hvc0s6iydmhgh5h2m15yza7p2rlss2vkif30vm9y77m97qcx\";\n    };\n\n    # Antlr (optional) allows the Java `gjdoc' tool to 
be built.  We want a\n    # binary distribution here to allow the whole chain to be bootstrapped.\n    javaAntlr = fetchurl {\n      url = \"http://www.antlr.org/download/antlr-3.1.3.jar\";\n      sha256 = \"1f41j0y4kjydl71lqlvr73yagrs2jsg1fjymzjz66mjy7al5lh09\";\n    };\n\n    xlibs = [\n      libX11 libXt libSM libICE libXtst libXrender libXrandr libXi\n      xproto renderproto xextproto inputproto randrproto\n    ];\n\n    javaAwtGtk = langJava && gtk != null;\n\n    /* Platform flags */\n    platformFlags = let\n        gccArch = lib.attrByPath [ \"platform\" \"gcc\" \"arch\" ] null stdenv;\n        gccCpu = lib.attrByPath [ \"platform\" \"gcc\" \"cpu\" ] null stdenv;\n        gccAbi = lib.attrByPath [ \"platform\" \"gcc\" \"abi\" ] null stdenv;\n        gccFpu = lib.attrByPath [ \"platform\" \"gcc\" \"fpu\" ] null stdenv;\n        gccFloat = lib.attrByPath [ \"platform\" \"gcc\" \"float\" ] null stdenv;\n        gccMode = lib.attrByPath [ \"platform\" \"gcc\" \"mode\" ] null stdenv;\n        withArch = if gccArch != null then \" --with-arch=${gccArch}\" else \"\";\n        withCpu = if gccCpu != null then \" --with-cpu=${gccCpu}\" else \"\";\n        withAbi = if gccAbi != null then \" --with-abi=${gccAbi}\" else \"\";\n        withFpu = if gccFpu != null then \" --with-fpu=${gccFpu}\" else \"\";\n        withFloat = if gccFloat != null then \" --with-float=${gccFloat}\" else \"\";\n        withMode = if gccMode != null then \" --with-mode=${gccMode}\" else \"\";\n      in\n        (withArch +\n        withCpu +\n        withAbi +\n        withFpu +\n        withFloat +\n        withMode);\n\n    /* Cross-gcc settings */\n    crossMingw = (cross != null && cross.libc == \"msvcrt\");\n    crossConfigureFlags = let\n        gccArch = lib.attrByPath [ \"gcc\" \"arch\" ] null cross;\n        gccCpu = lib.attrByPath [ \"gcc\" \"cpu\" ] null cross;\n        gccAbi = lib.attrByPath [ \"gcc\" \"abi\" ] null cross;\n        gccFpu = lib.attrByPath [ \"gcc\" \"fpu\" ] 
null cross;\n        gccFloat = lib.attrByPath [ \"gcc\" \"float\" ] null cross;\n        gccMode = lib.attrByPath [ \"gcc\" \"mode\" ] null cross;\n        withArch = if gccArch != null then \" --with-arch=${gccArch}\" else \"\";\n        withCpu = if gccCpu != null then \" --with-cpu=${gccCpu}\" else \"\";\n        withAbi = if gccAbi != null then \" --with-abi=${gccAbi}\" else \"\";\n        withFpu = if gccFpu != null then \" --with-fpu=${gccFpu}\" else \"\";\n        withFloat = if gccFloat != null then \" --with-float=${gccFloat}\" else \"\";\n        withMode = if gccMode != null then \" --with-mode=${gccMode}\" else \"\";\n      in\n        \"--target=${cross.config}\" +\n        withArch +\n        withCpu +\n        withAbi +\n        withFpu +\n        withFloat +\n        withMode +\n        (if crossMingw && crossStageStatic then\n          \" --with-headers=${libcCross}/include\" +\n          \" --with-gcc\" +\n          \" --with-gnu-as\" +\n          \" --with-gnu-ld\" +\n          \" --with-gnu-ld\" +\n          \" --disable-shared\" +\n          \" --disable-nls\" +\n          \" --disable-debug\" +\n          \" --enable-sjlj-exceptions\" +\n          \" --enable-threads=win32\" +\n          \" --disable-win32-registry\"\n          else if crossStageStatic then\n          \" --disable-libssp --disable-nls\" +\n          \" --without-headers\" +\n          \" --disable-threads \" +\n          \" --disable-libmudflap \" +\n          \" --disable-libgomp \" +\n          \" --disable-libquadmath\" +\n          \" --disable-shared\" +\n          \" --disable-decimal-float\" # libdecnumber requires libc\n          else\n          \" --with-headers=${libcCross}/include\" +\n          \" --enable-__cxa_atexit\" +\n          \" --enable-long-long\" +\n          (if crossMingw then\n            \" --enable-threads=win32\" +\n            \" --enable-sjlj-exceptions\" +\n            \" --enable-hash-synchronization\" +\n            \" --disable-libssp\" +\n  
          \" --disable-nls\" +\n            \" --with-dwarf2\" +\n            # I think noone uses shared gcc libs in mingw, so we better do the same.\n            # In any case, mingw32 g++ linking is broken by default with shared libs,\n            # unless adding \"-lsupc++\" to any linking command. I don't know why.\n            \" --disable-shared\" +\n            (if cross.config == \"x86_64-w64-mingw32\" then\n              # To keep ABI compatibility with upstream mingw-w64\n              \" --enable-fully-dynamic-string\"\n              else \"\")\n            else (if cross.libc == \"uclibc\" then\n              # In uclibc cases, libgomp needs an additional '-ldl'\n              # and as I don't know how to pass it, I disable libgomp.\n              \" --disable-libgomp\" else \"\") +\n            \" --enable-threads=posix\" +\n            \" --enable-nls\" +\n            \" --disable-decimal-float\") # No final libdecnumber (it may work only in 386)\n          );\n    stageNameAddon = if crossStageStatic then \"-stage-static\" else\n      \"-stage-final\";\n    crossNameAddon = if cross != null then \"-${cross.config}\" + stageNameAddon else \"\";\n\n  bootstrap = cross == null && !stdenv.isArm && !stdenv.isMips;\n\nin\n\n# We need all these X libraries when building AWT with GTK+.\nassert gtk != null -> (filter (x: x == null) xlibs) == [];\n\nstdenv.mkDerivation ({\n  name = \"${name}${if stripped then \"\" else \"-debug\"}-${version}\" + crossNameAddon;\n\n  builder = ./builder.sh;\n\n  src = fetchurl {\n    url = \"mirror://gnu/gcc/gcc-${version}/gcc-${version}.tar.bz2\";\n    sha256 = \"1hx9h64ivarlzi4hxvq42as5m9vlr5cyzaaq4gzj4i619zmkfz1g\";\n  };\n\n  inherit patches;\n\n  postPatch =\n    if (stdenv.isGNU\n        || (libcCross != null                  # e.g., building `gcc.crossDrv'\n            && libcCross ? 
crossConfig\n            && libcCross.crossConfig == \"i586-pc-gnu\")\n        || (crossGNU && libcCross != null))\n    then\n      # On GNU/Hurd glibc refers to Hurd & Mach headers and libpthread is not\n      # in glibc, so add the right `-I' flags to the default spec string.\n      assert libcCross != null -> libpthreadCross != null;\n      let\n        libc = if libcCross != null then libcCross else stdenv.glibc;\n        gnu_h = \"gcc/config/gnu.h\";\n        extraCPPDeps =\n             libc.propagatedBuildInputs\n          ++ lib.optional (libpthreadCross != null) libpthreadCross\n          ++ lib.optional (libpthread != null) libpthread;\n        extraCPPSpec =\n          concatStrings (intersperse \" \"\n                          (map (x: \"-I${x}/include\") extraCPPDeps));\n        extraLibSpec =\n          if libpthreadCross != null\n          then \"-L${libpthreadCross}/lib ${libpthreadCross.TARGET_LDFLAGS}\"\n          else \"-L${libpthread}/lib\";\n      in\n        '' echo \"augmenting \\`CPP_SPEC' in \\`${gnu_h}' with \\`${extraCPPSpec}'...\"\n           sed -i \"${gnu_h}\" \\\n               -es'|CPP_SPEC *\"\\(.*\\)$|CPP_SPEC \"${extraCPPSpec} \\1|g'\n\n           echo \"augmenting \\`LIB_SPEC' in \\`${gnu_h}' with \\`${extraLibSpec}'...\"\n           sed -i \"${gnu_h}\" \\\n               -es'|LIB_SPEC *\"\\(.*\\)$|LIB_SPEC \"${extraLibSpec} \\1|g'\n\n           echo \"setting \\`NATIVE_SYSTEM_HEADER_DIR' and \\`STANDARD_INCLUDE_DIR' to \\`${libc}/include'...\"\n           sed -i \"${gnu_h}\" \\\n               -es'|#define STANDARD_INCLUDE_DIR.*$|#define STANDARD_INCLUDE_DIR \"${libc}/include\"|g'\n        ''\n    else if cross != null || stdenv.gcc.libc != null then\n      # On NixOS, use the right path to the dynamic linker instead of\n      # `/lib/ld*.so'.\n      let\n        libc = if libcCross != null then libcCross else stdenv.gcc.libc;\n      in\n        '' echo \"fixing the \\`GLIBC_DYNAMIC_LINKER' and \\`UCLIBC_DYNAMIC_LINKER' 
macros...\"\n           for header in \"gcc/config/\"*-gnu.h \"gcc/config/\"*\"/\"*.h\n           do\n             grep -q LIBC_DYNAMIC_LINKER \"$header\" || continue\n             echo \"  fixing \\`$header'...\"\n             sed -i \"$header\" \\\n                 -e 's|define[[:blank:]]*\\([UCG]\\+\\)LIBC_DYNAMIC_LINKER\\([0-9]*\\)[[:blank:]]\"\\([^\\\"]\\+\\)\"$|define \\1LIBC_DYNAMIC_LINKER\\2 \"${libc}\\3\"|g'\n           done\n        ''\n    else null;\n\n  inherit noSysDirs staticCompiler langJava crossStageStatic\n    libcCross crossMingw;\n\n  nativeBuildInputs = [ texinfo which gettext ]\n    ++ (optional (perl != null) perl)\n    ++ (optional javaAwtGtk pkgconfig);\n\n  buildInputs = [ gmp mpfr mpc libelf ]\n    ++ (optional (ppl != null) ppl)\n    ++ (optional (cloog != null) cloog)\n    ++ (optional (zlib != null) zlib)\n    ++ (optionals langJava [ boehmgc zip unzip ])\n    ++ (optionals javaAwtGtk ([ gtk libart_lgpl ] ++ xlibs))\n    ++ (optionals (cross != null) [binutilsCross])\n    ++ (optionals langAda [gnatboot])\n    ++ (optionals langVhdl [gnat])\n\n    # The builder relies on GNU sed (for instance, Darwin's `sed' fails with\n    # \"-i may not be used with stdin\"), and `stdenvNative' doesn't provide it.\n    ++ (optional stdenv.isDarwin gnused)\n    ;\n\n  NIX_LDFLAGS = lib.optionalString  stdenv.isSunOS \"-lm -ldl\";\n\n  preConfigure = ''\n    configureFlagsArray=(\n      ${lib.optionalString (ppl != null && ppl ? 
dontDisableStatic && ppl.dontDisableStatic)\n        \"'--with-host-libstdcxx=-lstdc++ -lgcc_s'\"}\n      ${lib.optionalString (ppl != null && stdenv.isSunOS)\n        \"\\\"--with-host-libstdcxx=-Wl,-rpath,\\$prefix/lib/amd64 -lstdc++\\\"\n         \\\"--with-boot-ldflags=-L../prev-x86_64-pc-solaris2.11/libstdc++-v3/src/.libs\\\"\"}\n    );\n    ${lib.optionalString (stdenv.isSunOS && stdenv.is64bit)\n      ''\n        export NIX_LDFLAGS=`echo $NIX_LDFLAGS | sed -e s~$prefix/lib~$prefix/lib/amd64~g`\n        export LDFLAGS_FOR_TARGET=\"-Wl,-rpath,$prefix/lib/amd64 $LDFLAGS_FOR_TARGET\"\n        export CXXFLAGS_FOR_TARGET=\"-Wl,-rpath,$prefix/lib/amd64 $CXXFLAGS_FOR_TARGET\"\n        export CFLAGS_FOR_TARGET=\"-Wl,-rpath,$prefix/lib/amd64 $CFLAGS_FOR_TARGET\"\n      ''}\n    '';\n\n  # 'iant' at #go-nuts@freenode, gccgo maintainer, said that\n  # they have a bug in 4.7.1 if adding \"--disable-static\"\n  dontDisableStatic = langGo || staticCompiler;\n\n  configureFlags = \"\n    ${if stdenv.isSunOS then\n      \" --enable-long-long --enable-libssp --enable-threads=posix --disable-nls --enable-__cxa_atexit \" +\n      # On Illumos/Solaris GNU as is preferred\n      \" --with-gnu-as --without-gnu-ld \"\n      else \"\"}\n    --enable-lto\n    ${if enableMultilib then \"\" else \"--disable-multilib\"}\n    ${if enableShared then \"\" else \"--disable-shared\"}\n    ${if enablePlugin then \"--enable-plugin\" else \"--disable-plugin\"}\n    ${if ppl != null then \"--with-ppl=${ppl} --disable-ppl-version-check\" else \"\"}\n    ${if cloog != null then\n      \"--with-cloog=${cloog} --disable-cloog-version-check --enable-cloog-backend=isl\"\n      else \"\"}\n    ${if langJava then\n      \"--with-ecj-jar=${javaEcj} \" +\n\n      # Follow Sun's layout for the convenience of IcedTea/OpenJDK.  
See\n      # <http://mail.openjdk.java.net/pipermail/distro-pkg-dev/2010-April/008888.html>.\n      \"--enable-java-home --with-java-home=\\${prefix}/lib/jvm/jre \"\n      else \"\"}\n    ${if javaAwtGtk then \"--enable-java-awt=gtk\" else \"\"}\n    ${if langJava && javaAntlr != null then \"--with-antlr-jar=${javaAntlr}\" else \"\"}\n    --with-gmp=${gmp}\n    --with-mpfr=${mpfr}\n    --with-mpc=${mpc}\n    ${if libelf != null then \"--with-libelf=${libelf}\" else \"\"}\n    --disable-libstdcxx-pch\n    --without-included-gettext\n    --with-system-zlib\n    --enable-languages=${\n      concatStrings (intersperse \",\"\n        (  optional langC        \"c\"\n        ++ optional langCC       \"c++\"\n        ++ optional langFortran  \"fortran\"\n        ++ optional langJava     \"java\"\n        ++ optional langAda      \"ada\"\n        ++ optional langVhdl     \"vhdl\"\n        ++ optional langGo       \"go\"\n        )\n      )\n    }\n    ${if (stdenv ? glibc && cross == null)\n      then \" --with-native-system-header-dir=${stdenv.glibc}/include\"\n      else \"\"}\n    ${if langAda then \" --enable-libada\" else \"\"}\n    ${if cross == null && stdenv.isi686 then \"--with-arch=i686\" else \"\"}\n    ${if cross != null then crossConfigureFlags else \"\"}\n    ${if !bootstrap then \"--disable-bootstrap\" else \"\"}\n    ${if cross == null then platformFlags else \"\"}\n  \";\n\n  targetConfig = if cross != null then cross.config else null;\n\n  buildFlags = if bootstrap then\n    (if profiledCompiler then \"profiledbootstrap\" else \"bootstrap\")\n    else \"\";\n\n  installTargets =\n    if stripped\n    then \"install-strip\"\n    else \"install\";\n\n  crossAttrs = let\n    xgccArch = lib.attrByPath [ \"gcc\" \"arch\" ] null stdenv.cross;\n    xgccCpu = lib.attrByPath [ \"gcc\" \"cpu\" ] null stdenv.cross;\n    xgccAbi = lib.attrByPath [ \"gcc\" \"abi\" ] null stdenv.cross;\n    xgccFpu = lib.attrByPath [ \"gcc\" \"fpu\" ] null stdenv.cross;\n    xgccFloat = 
lib.attrByPath [ \"gcc\" \"float\" ] null stdenv.cross;\n    xwithArch = if xgccArch != null then \" --with-arch=${xgccArch}\" else \"\";\n    xwithCpu = if xgccCpu != null then \" --with-cpu=${xgccCpu}\" else \"\";\n    xwithAbi = if xgccAbi != null then \" --with-abi=${xgccAbi}\" else \"\";\n    xwithFpu = if xgccFpu != null then \" --with-fpu=${xgccFpu}\" else \"\";\n    xwithFloat = if xgccFloat != null then \" --with-float=${xgccFloat}\" else \"\";\n  in {\n    AR = \"${stdenv.cross.config}-ar\";\n    LD = \"${stdenv.cross.config}-ld\";\n    CC = \"${stdenv.cross.config}-gcc\";\n    CXX = \"${stdenv.cross.config}-gcc\";\n    AR_FOR_TARGET = \"${stdenv.cross.config}-ar\";\n    LD_FOR_TARGET = \"${stdenv.cross.config}-ld\";\n    CC_FOR_TARGET = \"${stdenv.cross.config}-gcc\";\n    NM_FOR_TARGET = \"${stdenv.cross.config}-nm\";\n    CXX_FOR_TARGET = \"${stdenv.cross.config}-g++\";\n    # If we are making a cross compiler, cross != null\n    NIX_GCC_CROSS = if cross == null then \"${stdenv.gccCross}\" else \"\";\n    dontStrip = true;\n    configureFlags = ''\n      ${if enableMultilib then \"\" else \"--disable-multilib\"}\n      ${if enableShared then \"\" else \"--disable-shared\"}\n      ${if ppl != null then \"--with-ppl=${ppl.crossDrv}\" else \"\"}\n      ${if cloog != null then \"--with-cloog=${cloog.crossDrv} --enable-cloog-backend=isl\" else \"\"}\n      ${if langJava then \"--with-ecj-jar=${javaEcj.crossDrv}\" else \"\"}\n      ${if javaAwtGtk then \"--enable-java-awt=gtk\" else \"\"}\n      ${if langJava && javaAntlr != null then \"--with-antlr-jar=${javaAntlr.crossDrv}\" else \"\"}\n      --with-gmp=${gmp.crossDrv}\n      --with-mpfr=${mpfr.crossDrv}\n      --disable-libstdcxx-pch\n      --without-included-gettext\n      --with-system-zlib\n      --enable-languages=${\n        concatStrings (intersperse \",\"\n          (  optional langC        \"c\"\n          ++ optional langCC       \"c++\"\n          ++ optional langFortran  \"fortran\"\n          
++ optional langJava     \"java\"\n          ++ optional langAda      \"ada\"\n          ++ optional langVhdl     \"vhdl\"\n          ++ optional langGo       \"go\"\n          )\n        )\n      }\n      ${if langAda then \" --enable-libada\" else \"\"}\n      --target=${stdenv.cross.config}\n      ${xwithArch}\n      ${xwithCpu}\n      ${xwithAbi}\n      ${xwithFpu}\n      ${xwithFloat}\n    '';\n    buildFlags = \"\";\n  };\n\n\n  # Needed for the cross compilation to work\n  AR = \"ar\";\n  LD = \"ld\";\n  # http://gcc.gnu.org/install/specific.html#x86-64-x-solaris210\n  CC = if stdenv.system == \"x86_64-solaris\" then \"gcc -m64\"\n       else \"gcc\";\n\n  # Setting $CPATH and $LIBRARY_PATH to make sure both `gcc' and `xgcc' find\n  # the library headers and binaries, regarless of the language being\n  # compiled.\n\n  # Note: When building the Java AWT GTK+ peer, the build system doesn't\n  # honor `--with-gmp' et al., e.g., when building\n  # `libjava/classpath/native/jni/java-math/gnu_java_math_GMP.c', so we just\n  # add them to $CPATH and $LIBRARY_PATH in this case.\n  #\n  # Likewise, the LTO code doesn't find zlib.\n\n  CPATH = concatStrings\n            (intersperse \":\" (map (x: x + \"/include\")\n                                  (optionals (zlib != null) [ zlib ]\n                                   ++ optionals langJava [ boehmgc ]\n                                   ++ optionals javaAwtGtk xlibs\n                                   ++ optionals javaAwtGtk [ gmp mpfr ]\n                                   ++ optional (libpthread != null) libpthread\n                                   ++ optional (libpthreadCross != null) libpthreadCross\n\n                                   # On GNU/Hurd glibc refers to Mach & Hurd\n                                   # headers.\n                                   ++ optionals (libcCross != null &&\n                                                 hasAttr \"propagatedBuildInputs\" libcCross)\n                        
                libcCross.propagatedBuildInputs)));\n\n  LIBRARY_PATH = concatStrings\n                   (intersperse \":\" (map (x: x + \"/lib\")\n                                         (optionals (zlib != null) [ zlib ]\n                                          ++ optionals langJava [ boehmgc ]\n                                          ++ optionals javaAwtGtk xlibs\n                                          ++ optionals javaAwtGtk [ gmp mpfr ]\n                                          ++ optional (libpthread != null) libpthread)));\n\n  EXTRA_TARGET_CFLAGS =\n    if cross != null && libcCross != null\n    then \"-idirafter ${libcCross}/include\"\n    else null;\n\n  EXTRA_TARGET_LDFLAGS =\n    if cross != null && libcCross != null\n    then \"-B${libcCross}/lib -Wl,-L${libcCross}/lib\" +\n         (optionalString (libpthreadCross != null)\n           \" -L${libpthreadCross}/lib -Wl,${libpthreadCross.TARGET_LDFLAGS}\")\n    else null;\n\n  passthru = { inherit langC langCC langAda langFortran langVhdl\n      langGo enableMultilib version; };\n\n  inherit enableParallelBuilding;\n\n  meta = {\n    homepage = \"http://gcc.gnu.org/\";\n    license = \"GPLv3+\";  # runtime support libraries are typically LGPLv3+\n    description = \"GNU Compiler Collection, version ${version}\"\n      + (if stripped then \"\" else \" (with debugging info)\");\n\n    longDescription = ''\n      The GNU Compiler Collection includes compiler front ends for C, C++,\n      Objective-C, Fortran, OpenMP for C/C++/Fortran, Java, and Ada, as well\n      as libraries for these languages (libstdc++, libgcj, libgomp,...).\n\n      GCC development is a part of the GNU Project, aiming to improve the\n      compiler used in the GNU system including the GNU/Linux variant.\n    '';\n\n    maintainers = [\n      lib.maintainers.ludo\n      lib.maintainers.viric\n      lib.maintainers.shlevy\n    ];\n\n    # Volunteers needed for the {Cyg,Dar}win ports of *PPL.\n    # gnatboot is not available out 
of linux platforms, so we disable the darwin build\n    # for the gnat (ada compiler).\n    platforms = lib.platforms.linux ++ optionals (langAda == false && libelf == null) [ \"i686-darwin\" ];\n  };\n}\n\n// optionalAttrs (cross != null && cross.libc == \"msvcrt\" && crossStageStatic) {\n  makeFlags = [ \"all-gcc\" \"all-target-libgcc\" ];\n  installTargets = \"install-gcc install-target-libgcc\";\n}\n\n\n# Strip kills static libs of other archs (hence cross != null)\n// optionalAttrs (!stripped || cross != null) { dontStrip = true; NIX_STRIP_DEBUG = 0; }\n)\n"
  },
  {
    "path": "pkgs/gcc-4.7/gfortran-driving.patch",
    "content": "This patch fixes interaction with Libtool.\nSee <http://thread.gmane.org/gmane.comp.gcc.patches/258777>, for details.\n\n--- a/gcc/fortran/gfortranspec.c\n+++ b/gcc/fortran/gfortranspec.c\n@@ -461,8 +461,15 @@ For more information about these matters, see the file named COPYING\\n\\n\"));\n     {\n       fprintf (stderr, _(\"Driving:\"));\n       for (i = 0; i < g77_newargc; i++)\n+\t{\n+\t  if (g77_new_decoded_options[i].opt_index == OPT_l)\n+\t    /* Make sure no white space is inserted after `-l'.  */\n+\t    fprintf (stderr, \" -l%s\",\n+\t\t     g77_new_decoded_options[i].canonical_option[1]);\n+\t  else\n \tfprintf (stderr, \" %s\",\n \t\t g77_new_decoded_options[i].orig_option_with_args_text);\n+\t}\n       fprintf (stderr, \"\\n\");\n     }\n"
  },
  {
    "path": "pkgs/gcc-4.7/gnat-cflags.patch",
    "content": "diff --git a/libada/Makefile.in b/libada/Makefile.in\nindex f5057a0..337e0c6 100644\n--- a/libada/Makefile.in\n+++ b/libada/Makefile.in\n@@ -55,7 +55,7 @@ GCC_WARN_CFLAGS = $(LOOSE_WARN)\n WARN_CFLAGS = @warn_cflags@\n \n TARGET_LIBGCC2_CFLAGS=\n-GNATLIBCFLAGS= -g -O2\n+GNATLIBCFLAGS= -g -O2 $(CFLAGS)\n GNATLIBCFLAGS_FOR_C = $(GNATLIBCFLAGS) $(TARGET_LIBGCC2_CFLAGS) -fexceptions \\\n \t-DIN_RTS @have_getipinfo@\n \n--- a/gcc/ada/gcc-interface/Makefile.in\n+++ b/gcc/ada/gcc-interface/Makefile.in\n@@ -105,7 +105,7 @@ ADAFLAGS = -W -Wall -gnatpg -gnata\n SOME_ADAFLAGS =-gnata\n FORCE_DEBUG_ADAFLAGS = -g\n GNATLIBFLAGS = -gnatpg -nostdinc\n-GNATLIBCFLAGS = -g -O2\n+GNATLIBCFLAGS = -g -O2 $(CFLAGS_FOR_TARGET)\n # Pretend that _Unwind_GetIPInfo is available for the target by default.  This\n # should be autodetected during the configuration of libada and passed down to\n # here, but we need something for --disable-libada and hope for the best.\n@@ -193,7 +193,7 @@ RTSDIR = rts$(subst /,_,$(MULTISUBDIR))\n # Link flags used to build gnat tools.  By default we prefer to statically\n # link with libgcc to avoid a dependency on shared libgcc (which is tricky\n # to deal with as it may conflict with the libgcc provided by the system).\n-GCC_LINK_FLAGS=-static-libgcc\n+GCC_LINK_FLAGS=-static-libgcc $(CFLAGS_FOR_TARGET)\n \n # End of variables for you to override.\n \n"
  },
  {
    "path": "pkgs/gcc-4.7/java-jvgenmain-link.patch",
    "content": "The `jvgenmain' executable must be linked against `vec.o', among others,\nsince it uses its vector API.\n\n--- gcc-4.3.3/gcc/java/Make-lang.in\t2008-12-05 00:00:19.000000000 +0100\n+++ gcc-4.3.3/gcc/java/Make-lang.in\t2009-07-03 16:11:41.000000000 +0200\n@@ -109,9 +109,9 @@ jcf-dump$(exeext): $(JCFDUMP_OBJS) $(LIB\n \t$(CC) $(ALL_CFLAGS) $(LDFLAGS) -o $@ $(JCFDUMP_OBJS) \\\n \t\t$(CPPLIBS) $(ZLIB) $(LDEXP_LIB) $(LIBS)\n \n-jvgenmain$(exeext): $(JVGENMAIN_OBJS) $(LIBDEPS)\n+jvgenmain$(exeext): $(JVGENMAIN_OBJS) $(LIBDEPS) $(BUILD_RTL)\n \trm -f $@\n-\t$(CC) $(ALL_CFLAGS) $(LDFLAGS) -o $@ $(JVGENMAIN_OBJS) $(LIBS)\n+\t$(CC) $(ALL_CFLAGS) $(LDFLAGS) -o $@ $(JVGENMAIN_OBJS) $(BUILD_RTL) $(LIBS)\n \n #\f\n # Build hooks:\n"
  },
  {
    "path": "pkgs/gcc-4.7/libstdc++-target.patch",
    "content": "Patch to make the target libraries 'configure' scripts find the proper CPP.\nI noticed that building the mingw32 cross compiler.\nLooking at the build script for mingw in archlinux, I think that only nixos\nneeds this patch. I don't know why.\ndiff --git a/Makefile.in b/Makefile.in\nindex 93f66b6..d691917 100644\n--- a/Makefile.in\n+++ b/Makefile.in\n@@ -266,6 +266,7 @@ BASE_TARGET_EXPORTS = \\\n \tAR=\"$(AR_FOR_TARGET)\"; export AR; \\\n \tAS=\"$(COMPILER_AS_FOR_TARGET)\"; export AS; \\\n \tCC=\"$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS\"; export CC; \\\n+\tCPP=\"$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E\"; export CC; \\\n \tCFLAGS=\"$(CFLAGS_FOR_TARGET)\"; export CFLAGS; \\\n \tCONFIG_SHELL=\"$(SHELL)\"; export CONFIG_SHELL; \\\n \tCPPFLAGS=\"$(CPPFLAGS_FOR_TARGET)\"; export CPPFLAGS; \\\n@@ -291,11 +292,13 @@ BASE_TARGET_EXPORTS = \\\n RAW_CXX_TARGET_EXPORTS = \\\n \t$(BASE_TARGET_EXPORTS) \\\n \tCXX_FOR_TARGET=\"$(RAW_CXX_FOR_TARGET)\"; export CXX_FOR_TARGET; \\\n-\tCXX=\"$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS\"; export CXX;\n+\tCXX=\"$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS\"; export CXX; \\\n+\tCXXCPP=\"$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E\"; export CXX;\n \n NORMAL_TARGET_EXPORTS = \\\n \t$(BASE_TARGET_EXPORTS) \\\n-\tCXX=\"$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS\"; export CXX;\n+\tCXX=\"$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS\"; export CXX; \\\n+\tCXXCPP=\"$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E\"; export CXX;\n \n # Where to find GMP\n HOST_GMPLIBS = @gmplibs@\n"
  },
  {
    "path": "pkgs/gcc-4.7/no-sys-dirs.patch",
    "content": "diff -ru gcc-4.3.1-orig/gcc/cppdefault.c gcc-4.3.1/gcc/cppdefault.c\n--- gcc-4.3.1-orig/gcc/cppdefault.c\t2007-07-26 10:37:01.000000000 +0200\n+++ gcc-4.3.1/gcc/cppdefault.c\t2008-06-25 17:48:23.000000000 +0200\n@@ -41,6 +41,10 @@\n # undef CROSS_INCLUDE_DIR\n #endif\n \n+#undef LOCAL_INCLUDE_DIR\n+#undef SYSTEM_INCLUDE_DIR\n+#undef STANDARD_INCLUDE_DIR\n+\n const struct default_include cpp_include_defaults[]\n #ifdef INCLUDE_DEFAULTS\n = INCLUDE_DEFAULTS;\ndiff -ru gcc-4.3.1-orig/gcc/gcc.c gcc-4.3.1/gcc/gcc.c\n--- gcc-4.3.1-orig/gcc/gcc.c\t2008-03-02 23:55:19.000000000 +0100\n+++ gcc-4.3.1/gcc/gcc.c\t2008-06-25 17:52:53.000000000 +0200\n@@ -1478,10 +1478,10 @@\n /* Default prefixes to attach to command names.  */\n \n #ifndef STANDARD_STARTFILE_PREFIX_1\n-#define STANDARD_STARTFILE_PREFIX_1 \"/lib/\"\n+#define STANDARD_STARTFILE_PREFIX_1 \"\"\n #endif\n #ifndef STANDARD_STARTFILE_PREFIX_2\n-#define STANDARD_STARTFILE_PREFIX_2 \"/usr/lib/\"\n+#define STANDARD_STARTFILE_PREFIX_2 \"\"\n #endif\n \n #ifdef CROSS_DIRECTORY_STRUCTURE  /* Don't use these prefixes for a cross compiler.  */\n--- gcc-4.3.1-orig/gcc/Makefile.in\t2008-05-11 20:54:15.000000000 +0200\n+++ gcc-4.3.1/gcc/Makefile.in\t2008-06-25 17:48:23.000000000 +0200\n@@ -3277,7 +3281,7 @@\n   -DGPLUSPLUS_INCLUDE_DIR=\\\"$(gcc_gxx_include_dir)\\\" \\\n   -DGPLUSPLUS_TOOL_INCLUDE_DIR=\\\"$(gcc_gxx_include_dir)/$(target_noncanonical)\\\" \\\n   -DGPLUSPLUS_BACKWARD_INCLUDE_DIR=\\\"$(gcc_gxx_include_dir)/backward\\\" \\\n-  -DLOCAL_INCLUDE_DIR=\\\"$(local_includedir)\\\" \\\n+  -DLOCAL_INCLUDE_DIR=\\\"/no-such-dir\\\" \\\n   -DCROSS_INCLUDE_DIR=\\\"$(CROSS_SYSTEM_HEADER_DIR)\\\" \\\n   -DTOOL_INCLUDE_DIR=\\\"$(gcc_tooldir)/include\\\" \\\n   -DPREFIX=\\\"$(prefix)/\\\" \\\n"
  },
  {
    "path": "pkgs/gcc-4.7/parallel-bconfig-4.7.patch",
    "content": "diff --git a/gcc/Makefile.in b/gcc/Makefile.in\nindex 0f6735a..ba93e9b 100644\n--- a/gcc/Makefile.in\n+++ b/gcc/Makefile.in\n@@ -3904,21 +3904,21 @@ build/genflags.o : genflags.c $(RTL_BASE_H) $(OBSTACK_H) $(BCONFIG_H)\t\\\n   $(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(READ_MD_H) gensupport.h\n build/gengenrtl.o : gengenrtl.c $(BCONFIG_H) $(SYSTEM_H) rtl.def\n gengtype-lex.o build/gengtype-lex.o : gengtype-lex.c gengtype.h $(SYSTEM_H)\n-gengtype-lex.o: $(CONFIG_H)\n+gengtype-lex.o: $(CONFIG_H) $(BCONFIG_H)\n build/gengtype-lex.o: $(BCONFIG_H)\n gengtype-parse.o build/gengtype-parse.o : gengtype-parse.c gengtype.h \\\n   $(SYSTEM_H)\n-gengtype-parse.o: $(CONFIG_H)\n+gengtype-parse.o: $(CONFIG_H) $(BCONFIG_H)\n build/gengtype-parse.o: $(BCONFIG_H)\n gengtype-state.o build/gengtype-state.o: gengtype-state.c $(SYSTEM_H) \\\n   gengtype.h errors.h double-int.h version.h $(HASHTAB_H) $(OBSTACK_H) \\\n   $(XREGEX_H)\n-gengtype-state.o: $(CONFIG_H)\n+gengtype-state.o: $(CONFIG_H) $(BCONFIG_H)\n build/gengtype-state.o: $(BCONFIG_H)\n gengtype.o build/gengtype.o : gengtype.c $(SYSTEM_H) gengtype.h \t\\\n   rtl.def insn-notes.def errors.h double-int.h version.h $(HASHTAB_H) \\\n   $(OBSTACK_H) $(XREGEX_H)\n-gengtype.o: $(CONFIG_H)\n+gengtype.o: $(CONFIG_H) $(BCONFIG_H)\n build/gengtype.o: $(BCONFIG_H)\n build/genmddeps.o: genmddeps.c $(BCONFIG_H) $(SYSTEM_H) coretypes.h\t\\\n   errors.h $(READ_MD_H)\n"
  },
  {
    "path": "pkgs/gecko/default.nix",
    "content": "{ geckoSrc ? null, lib\n, stdenv, fetchFromGitHub, pythonFull, which, autoconf213, m4\n, perl, unzip, zip, gnumake, yasm, pkgconfig, xlibs, gnome2, pango, freetype, fontconfig, cairo\n, dbus, dbus_glib, alsaLib, libpulseaudio\n, gtk3, glib, gobjectIntrospection, gdk_pixbuf, atk, gtk2\n, git, mercurial, openssl, cmake, procps\n, libnotify\n, valgrind, gdb, rr\n, inotify-tools\n, setuptools\n, rust # rust & cargo bundled. (otheriwse use pkgs.rust.{rustc,cargo})\n, buildFHSUserEnv # Build a FHS environment with all Gecko dependencies.\n, llvm, llvmPackages, nasm\n, ccache\n\n, zlib, xorg\n, rust-cbindgen\n, nodejs\n, jsdoc\n, fzf # needed by \"mack try fuzzy\"\n}:\n\nlet\n\n  inherit (lib) updateFromGitHub importJSON optionals inNixShell;\n\n  gcc = if stdenv.cc.isGNU then stdenv.cc.cc else stdenv.cc.cc.stdenv.cc.cc;\n\n  # Gecko sources are huge, we do not want to import them in the nix-store when\n  # we use this expression for making a build environment.\n  src =\n    if inNixShell then\n      null\n    else if geckoSrc == null then\n      fetchFromGitHub (importJSON ./source.json)\n    else\n      geckoSrc;\n\n  version = \"HEAD\"; # XXX: builtins.readFile \"${src}/browser/config/version.txt\";\n\n  buildInputs = [\n\n    # Expected by \"mach\"\n    pythonFull setuptools which autoconf213 m4\n\n    # Expected by the configure script\n    perl unzip zip gnumake yasm pkgconfig\n\n    xlibs.libICE xlibs.libSM xlibs.libX11 xlibs.libXau xlibs.libxcb\n    xlibs.libXdmcp xlibs.libXext xlibs.libXt xlibs.libXtst\n    xlibs.libXcomposite\n    xlibs.libXfixes\n    xlibs.libXdamage xlibs.libXrender\n    ] ++ (if xlibs ? 
xproto then [\n    xlibs.damageproto xlibs.printproto xlibs.kbproto\n    xlibs.renderproto xlibs.xextproto xlibs.xproto\n    xlibs.compositeproto xlibs.fixesproto\n    ] else [\n    xorg.xorgproto\n    ]) ++ [\n    gnome2.libart_lgpl gnome2.libbonobo gnome2.libbonoboui\n    gnome2.libgnome gnome2.libgnomecanvas gnome2.libgnomeui\n    gnome2.libIDL\n\n    pango freetype fontconfig cairo\n\n    dbus dbus_glib\n\n    alsaLib libpulseaudio\n\n    gtk3 glib gobjectIntrospection gdk_pixbuf atk\n    gtk2 gnome2.GConf\n\n    rust\n\n    # For building bindgen\n    # Building bindgen is now done with the extra options added by genMozConfig\n    # shellHook, do not include clang directly in order to avoid messing up with\n    # the choices of the compilers.\n\n    # clang\n    llvm\n\n    # mach mochitest\n    procps\n\n    # \"mach vendor rust\" wants to list modified files by using the vcs.\n    git mercurial\n\n    # needed for compiling cargo-vendor and its dependencies\n    openssl cmake\n\n    # Useful for getting notification at the end of the build.\n    libnotify\n\n    # cbindgen is used to generate C bindings for WebRender.\n    rust-cbindgen\n\n    # nasm is used to build libdav1d.\n    nasm\n\n    # NodeJS is used for tooling around JS development.\n    nodejs\n\n    # Used for building documentation.\n    # jsdoc\n\n  ] ++ optionals inNixShell [\n    valgrind gdb ccache\n    (if stdenv.isAarch64 then null else rr)\n    fzf # needed by \"mach try fuzzy\"\n    inotify-tools # Workaround download of prebuilt binaries.\n  ];\n\n  # bindgen.configure now has a rule to check that with-libclang-path matches CC\n  # or CXX. 
Default to the stdenv compiler if we are compiling with clang.\n  clang_path =\n    if stdenv.cc.isGNU then \"${llvmPackages.clang}/bin/clang\"\n    else \"${stdenv.cc}/bin/cc\";\n  libclang_path =\n    if stdenv.cc.isGNU then \"${llvmPackages.clang.cc.lib}/lib\"\n    else \"${stdenv.cc.cc.lib}/lib\";\n\n  genMozConfig = ''\n    cxxLib=$( echo -n ${gcc}/include/c++/* )\n    archLib=$cxxLib/$( ${gcc}/bin/gcc -dumpmachine )\n\n    cat - > $MOZCONFIG <<EOF\n    ac_add_options --disable-bootstrap\n    #ac_add_options --without-wasm-sandboxed-libraries # this may be needed\n    mk_add_options AUTOCONF=${autoconf213}/bin/autoconf\n    ac_add_options --with-libclang-path=${libclang_path}\n    ac_add_options --with-clang-path=${clang_path}\n    export BINDGEN_CFLAGS=\"-cxx-isystem $cxxLib -isystem $archLib\"\n    export CC=\"${stdenv.cc}/bin/cc\"\n    export CXX=\"${stdenv.cc}/bin/c++\"\n    EOF\n  '';\n\n  shellHook = ''\n    export MOZCONFIG=$PWD/.mozconfig.nix-shell\n    export MOZBUILD_STATE_PATH=$PWD/.mozbuild\n    export CC=\"${stdenv.cc}/bin/cc\";\n    export CXX=\"${stdenv.cc}/bin/c++\";\n    # To be used when building the JS Shell.\n    export NIX_EXTRA_CONFIGURE_ARGS=\"--with-libclang-path=${libclang_path} --with-clang-path=${clang_path}\"\n    cxxLib=$( echo -n ${gcc}/include/c++/* )\n    archLib=$cxxLib/$( ${gcc}/bin/gcc -dumpmachine )\n    export BINDGEN_CFLAGS=\"-cxx-isystem $cxxLib -isystem $archLib\"\n    ${genMozConfig}\n    ${builtins.getEnv \"NIX_SHELL_HOOK\"}\n    unset AS\n  '';\n\n  # propagatedBuildInput should already have applied the \"lib.chooseDevOutputs\"\n  # on the propagated build inputs.\n  pullAllInputs = inputs:\n    inputs ++ lib.concatMap (i: pullAllInputs (i.propagatedNativeBuildInputs or [])) inputs;\n\n  fhs = buildFHSUserEnv rec {\n    name = \"gecko-deps-fhs\";\n    targetPkgs = _: pullAllInputs (lib.chooseDevOutputs (buildInputs ++ [ stdenv.cc zlib xorg.libXinerama xorg.libXxf86vm ]));\n    multiPkgs = null; #targetPkgs;\n    
extraOutputsToInstall = [ \"share\" ];\n    profile = ''\n      # build-fhs-userenv/env.nix adds it, but causes 'ls' to SEGV.\n      unset LD_LIBRARY_PATH;\n      export LD_LIBRARY_PATH=/lib/;\n      export IN_NIX_SHELL=1\n      export PKG_CONFIG_PATH=/usr/lib/pkgconfig:/usr/share/pkgconfig\n      ${shellHook}\n    '';\n  };\nin\n\nstdenv.mkDerivation {\n  name = \"gecko-dev-${version}\";\n  inherit src buildInputs shellHook;\n\n  # Useful for debugging this Nix expression.\n  tracePhases = true;\n\n  configurePhase = ''\n    unset AS; # Set to CC when configured.\n    export MOZBUILD_STATE_PATH=$(pwd)/.mozbuild\n    export MOZCONFIG=$(pwd)/.mozconfig\n    export builddir=$(pwd)/builddir\n    ${genMozConfig}\n\n    mkdir -p $MOZBUILD_STATE_PATH $builddir\n\n    echo >> $MOZCONFIG \"\n    # . $src/build/mozconfig.common\n\n    ac_add_options --enable-application=browser\n    mk_add_options MOZ_OBJDIR=$builddir\n    ac_add_options --prefix=$out\n    ac_add_options --enable-official-branding\n    \"\n  '';\n\n  AUTOCONF = \"${autoconf213}/bin/autoconf\";\n\n  buildPhase = ''\n    cd $builddir\n    $src/mach build\n  '';\n\n  installPhase = ''\n    cd $builddir\n    $src/mach install\n  '';\n\n  # TODO: are there tests we would like to run? or should we package them separately?\n  doCheck = false;\n  doInstallCheck = false;\n\n  # This is for debugging purposes, go to hell damn wrapper which are removing\n  # all I need for debugging.\n  hardeningDisable = [ \"all\" ];\n\n  passthru.updateScript = updateFromGitHub {\n    owner = \"mozilla\";\n    repo = \"gecko-dev\";\n    branch = \"master\";\n    path = \"pkgs/gecko/source.json\";\n  };\n  passthru.fhs = fhs; # gecko.x86_64-linux.gcc.fhs.env\n}\n"
  },
  {
    "path": "pkgs/gecko/source.json",
    "content": "{\n  \"owner\": \"mozilla\",\n  \"repo\": \"gecko-dev\",\n  \"rev\": \"fee636af734a0ce6dc7335691cc94664bafc385d\",\n  \"sha256\": \"0nnkqmglbi2znkz1avnyn064i5hngvsqrmhw8ccg6g4ga9bac8fv\"\n}\n"
  },
  {
    "path": "pkgs/git-cinnabar/default.nix",
    "content": "{ stdenv, fetchFromGitHub, autoconf\n, zlib\n, python\n, perl\n, gettext\n, git\n, mercurial\n, curl\n}:\n\n# NOTE: git-cinnabar depends on a specific version of git-core, thus you should\n# ensure that you install a git-cinnabar version which matches your git version.\n#\n# NOTE: This package only provides git-cinnabar tools, as a git users might want\n# to have additional commands not provided by this forked version of git-core.\nstdenv.mkDerivation rec {\n  version = \"0.5.4\";\n  name = \"git-cinnabar-${version}\";\n  src = fetchFromGitHub {\n    owner = \"glandium\";\n    repo = \"git-cinnabar\";\n    inherit name;\n    rev = version; # tag name\n    fetchSubmodules = true;\n    sha256 = \"1cjn2cc6mj4m736wxab9s6qx83p5n5ha8cr3x84s9ra6rxs8d7pi\";\n  };\n  buildInputs = [ autoconf python gettext git curl ];\n\n  ZLIB_PATH = zlib;\n  ZLIB_DEV_PATH = zlib.dev;\n\n  PERL_PATH = \"${perl}/bin/perl\";\n  NO_TCLTK = true;\n  V=1;\n\n  preBuild = ''\n    export ZLIB_PATH;\n    export ZLIB_DEV_PATH;\n    substituteInPlace git-core/Makefile --replace \\\n      '$(ZLIB_PATH)/include' '$(ZLIB_DEV_PATH)/include'\n    # Comment out calls to git to try to verify that git-core is up to date\n    substituteInPlace Makefile \\\n      --replace '$(eval $(call exec,git' '# $(eval $(call exec,git'\n\n\n    export PERL_PATH;\n    export NO_TCLTK\n    export V;\n  '';\n\n  makeFlags = \"prefix=\\${out}\";\n\n  installTargets = \"git-install\";\n\n  postInstall =\n    let mercurial-py = mercurial + \"/\" + mercurial.python.sitePackages; in ''\n    # git-cinnabar rebuild git, we do not need that.\n    rm -rf $out/bin/* $out/share $out/lib\n    for f in $out/libexec/git-core/{git-remote-hg,git-cinnabar} ; do\n      substituteInPlace $f --replace \\\n        \"sys.path.append(os.path.join(os.path.dirname(__file__), 'pythonlib'))\" \\\n        \"sys.path.extend(['$out/libexec/git-core/pythonlib', '${mercurial-py}'])\"\n      mv $f $out/bin\n    done\n    mv 
$out/libexec/git-core/git-cinnabar-helper $out/bin/git-cinnabar-helper\n    mv $out/libexec/git-core/pythonlib $out/pythonlib\n    rm -rf $out/libexec/git-core/*\n    mv $out/pythonlib $out/libexec/git-core/pythonlib\n    substituteInPlace $out/libexec/git-core/pythonlib/cinnabar/helper.py \\\n      --replace \"Git.config('cinnabar.helper')\" \"Git.config('cinnabar.helper') or '$out/bin/git-cinnabar-helper'\"\n  '';\n}\n"
  },
  {
    "path": "pkgs/jsdoc/default.nix",
    "content": "# This file has been generated by node2nix 1.9.0. Do not edit!\n\n{pkgs ? import <nixpkgs> {\n    inherit system;\n  }, system ? builtins.currentSystem, nodejs ? pkgs.\"nodejs-12_x\"}:\n\nlet\n  nodeEnv = import ./node-env.nix {\n    inherit (pkgs) stdenv lib python2 runCommand writeTextFile;\n    inherit pkgs nodejs;\n    libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;\n  };\nin\nimport ./node-packages.nix {\n  inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;\n  inherit nodeEnv;\n}\n"
  },
  {
    "path": "pkgs/jsdoc/node-env.nix",
    "content": "# This file originates from node2nix\n\n{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}:\n\nlet\n  # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master\n  utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;\n\n  python = if nodejs ? python then nodejs.python else python2;\n\n  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise\n  tarWrapper = runCommand \"tarWrapper\" {} ''\n    mkdir -p $out/bin\n\n    cat > $out/bin/tar <<EOF\n    #! ${stdenv.shell} -e\n    $(type -p tar) \"\\$@\" --warning=no-unknown-keyword --delay-directory-restore\n    EOF\n\n    chmod +x $out/bin/tar\n  '';\n\n  # Function that generates a TGZ file from a NPM project\n  buildNodeSourceDist =\n    { name, version, src, ... }:\n\n    stdenv.mkDerivation {\n      name = \"node-tarball-${name}-${version}\";\n      inherit src;\n      buildInputs = [ nodejs ];\n      buildPhase = ''\n        export HOME=$TMPDIR\n        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)\n      '';\n      installPhase = ''\n        mkdir -p $out/tarballs\n        mv $tgzFile $out/tarballs\n        mkdir -p $out/nix-support\n        echo \"file source-dist $out/tarballs/$tgzFile\" >> $out/nix-support/hydra-build-products\n      '';\n    };\n\n  includeDependencies = {dependencies}:\n    lib.optionalString (dependencies != [])\n      (lib.concatMapStrings (dependency:\n        ''\n          # Bundle the dependencies of the package\n          mkdir -p node_modules\n          cd node_modules\n\n          # Only include dependencies if they don't exist. They may also be bundled in the package.\n          if [ ! 
-e \"${dependency.name}\" ]\n          then\n              ${composePackage dependency}\n          fi\n\n          cd ..\n        ''\n      ) dependencies);\n\n  # Recursively composes the dependencies of a package\n  composePackage = { name, packageName, src, dependencies ? [], ... }@args:\n    builtins.addErrorContext \"while evaluating node package '${packageName}'\" ''\n      DIR=$(pwd)\n      cd $TMPDIR\n\n      unpackFile ${src}\n\n      # Make the base dir in which the target dependency resides first\n      mkdir -p \"$(dirname \"$DIR/${packageName}\")\"\n\n      if [ -f \"${src}\" ]\n      then\n          # Figure out what directory has been unpacked\n          packageDir=\"$(find . -maxdepth 1 -type d | tail -1)\"\n\n          # Restore write permissions to make building work\n          find \"$packageDir\" -type d -exec chmod u+x {} \\;\n          chmod -R u+w \"$packageDir\"\n\n          # Move the extracted tarball into the output folder\n          mv \"$packageDir\" \"$DIR/${packageName}\"\n      elif [ -d \"${src}\" ]\n      then\n          # Get a stripped name (without hash) of the source directory.\n          # On old nixpkgs it's already set internally.\n          if [ -z \"$strippedName\" ]\n          then\n              strippedName=\"$(stripHash ${src})\"\n          fi\n\n          # Restore write permissions to make building work\n          chmod -R u+w \"$strippedName\"\n\n          # Move the extracted directory into the output folder\n          mv \"$strippedName\" \"$DIR/${packageName}\"\n      fi\n\n      # Unset the stripped name to not confuse the next unpack step\n      unset strippedName\n\n      # Include the dependencies of the package\n      cd \"$DIR/${packageName}\"\n      ${includeDependencies { inherit dependencies; }}\n      cd ..\n      ${lib.optionalString (builtins.substring 0 1 packageName == \"@\") \"cd ..\"}\n    '';\n\n  pinpointDependencies = {dependencies, production}:\n    let\n      
pinpointDependenciesFromPackageJSON = writeTextFile {\n        name = \"pinpointDependencies.js\";\n        text = ''\n          var fs = require('fs');\n          var path = require('path');\n\n          function resolveDependencyVersion(location, name) {\n              if(location == process.env['NIX_STORE']) {\n                  return null;\n              } else {\n                  var dependencyPackageJSON = path.join(location, \"node_modules\", name, \"package.json\");\n\n                  if(fs.existsSync(dependencyPackageJSON)) {\n                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));\n\n                      if(dependencyPackageObj.name == name) {\n                          return dependencyPackageObj.version;\n                      }\n                  } else {\n                      return resolveDependencyVersion(path.resolve(location, \"..\"), name);\n                  }\n              }\n          }\n\n          function replaceDependencies(dependencies) {\n              if(typeof dependencies == \"object\" && dependencies !== null) {\n                  for(var dependency in dependencies) {\n                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);\n\n                      if(resolvedVersion === null) {\n                          process.stderr.write(\"WARNING: cannot pinpoint dependency: \"+dependency+\", context: \"+process.cwd()+\"\\n\");\n                      } else {\n                          dependencies[dependency] = resolvedVersion;\n                      }\n                  }\n              }\n          }\n\n          /* Read the package.json configuration */\n          var packageObj = JSON.parse(fs.readFileSync('./package.json'));\n\n          /* Pinpoint all dependencies */\n          replaceDependencies(packageObj.dependencies);\n          if(process.argv[2] == \"development\") {\n              replaceDependencies(packageObj.devDependencies);\n        
  }\n          replaceDependencies(packageObj.optionalDependencies);\n\n          /* Write the fixed package.json file */\n          fs.writeFileSync(\"package.json\", JSON.stringify(packageObj, null, 2));\n        '';\n      };\n    in\n    ''\n      node ${pinpointDependenciesFromPackageJSON} ${if production then \"production\" else \"development\"}\n\n      ${lib.optionalString (dependencies != [])\n        ''\n          if [ -d node_modules ]\n          then\n              cd node_modules\n              ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}\n              cd ..\n          fi\n        ''}\n    '';\n\n  # Recursively traverses all dependencies of a package and pinpoints all\n  # dependencies in the package.json file to the versions that are actually\n  # being used.\n\n  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:\n    ''\n      if [ -d \"${packageName}\" ]\n      then\n          cd \"${packageName}\"\n          ${pinpointDependencies { inherit dependencies production; }}\n          cd ..\n          ${lib.optionalString (builtins.substring 0 1 packageName == \"@\") \"cd ..\"}\n      fi\n    '';\n\n  # Extract the Node.js source code which is used to compile packages with\n  # native bindings\n  nodeSources = runCommand \"node-sources\" {} ''\n    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}\n    mv node-* $out\n  '';\n\n  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)\n  addIntegrityFieldsScript = writeTextFile {\n    name = \"addintegrityfields.js\";\n    text = ''\n      var fs = require('fs');\n      var path = require('path');\n\n      function augmentDependencies(baseDir, dependencies) {\n          for(var dependencyName in dependencies) {\n              var dependency = dependencies[dependencyName];\n\n              // Open package.json and augment metadata fields\n 
             var packageJSONDir = path.join(baseDir, \"node_modules\", dependencyName);\n              var packageJSONPath = path.join(packageJSONDir, \"package.json\");\n\n              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored\n                  console.log(\"Adding metadata fields to: \"+packageJSONPath);\n                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));\n\n                  if(dependency.integrity) {\n                      packageObj[\"_integrity\"] = dependency.integrity;\n                  } else {\n                      packageObj[\"_integrity\"] = \"sha1-000000000000000000000000000=\"; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.\n                  }\n\n                  if(dependency.resolved) {\n                      packageObj[\"_resolved\"] = dependency.resolved; // Adopt the resolved property if one has been provided\n                  } else {\n                      packageObj[\"_resolved\"] = dependency.version; // Set the resolved version to the version identifier. 
This prevents NPM from cloning Git repositories.\n                  }\n\n                  if(dependency.from !== undefined) { // Adopt from property if one has been provided\n                      packageObj[\"_from\"] = dependency.from;\n                  }\n\n                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));\n              }\n\n              // Augment transitive dependencies\n              if(dependency.dependencies !== undefined) {\n                  augmentDependencies(packageJSONDir, dependency.dependencies);\n              }\n          }\n      }\n\n      if(fs.existsSync(\"./package-lock.json\")) {\n          var packageLock = JSON.parse(fs.readFileSync(\"./package-lock.json\"));\n\n          if(![1, 2].includes(packageLock.lockfileVersion)) {\n             process.stderr.write(\"Sorry, I only understand lock file versions 1 and 2!\\n\");\n             process.exit(1);\n          }\n\n          if(packageLock.dependencies !== undefined) {\n              augmentDependencies(\".\", packageLock.dependencies);\n          }\n      }\n    '';\n  };\n\n  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes\n  reconstructPackageLock = writeTextFile {\n    name = \"addintegrityfields.js\";\n    text = ''\n      var fs = require('fs');\n      var path = require('path');\n\n      var packageObj = JSON.parse(fs.readFileSync(\"package.json\"));\n\n      var lockObj = {\n          name: packageObj.name,\n          version: packageObj.version,\n          lockfileVersion: 1,\n          requires: true,\n          dependencies: {}\n      };\n\n      function augmentPackageJSON(filePath, dependencies) {\n          var packageJSON = path.join(filePath, \"package.json\");\n          if(fs.existsSync(packageJSON)) {\n              var packageObj = JSON.parse(fs.readFileSync(packageJSON));\n              dependencies[packageObj.name] = {\n                  version: 
packageObj.version,\n                  integrity: \"sha1-000000000000000000000000000=\",\n                  dependencies: {}\n              };\n              processDependencies(path.join(filePath, \"node_modules\"), dependencies[packageObj.name].dependencies);\n          }\n      }\n\n      function processDependencies(dir, dependencies) {\n          if(fs.existsSync(dir)) {\n              var files = fs.readdirSync(dir);\n\n              files.forEach(function(entry) {\n                  var filePath = path.join(dir, entry);\n                  var stats = fs.statSync(filePath);\n\n                  if(stats.isDirectory()) {\n                      if(entry.substr(0, 1) == \"@\") {\n                          // When we encounter a namespace folder, augment all packages belonging to the scope\n                          var pkgFiles = fs.readdirSync(filePath);\n\n                          pkgFiles.forEach(function(entry) {\n                              if(stats.isDirectory()) {\n                                  var pkgFilePath = path.join(filePath, entry);\n                                  augmentPackageJSON(pkgFilePath, dependencies);\n                              }\n                          });\n                      } else {\n                          augmentPackageJSON(filePath, dependencies);\n                      }\n                  }\n              });\n          }\n      }\n\n      processDependencies(\"node_modules\", lockObj.dependencies);\n\n      fs.writeFileSync(\"package-lock.json\", JSON.stringify(lockObj, null, 2));\n    '';\n  };\n\n  prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:\n    let\n      forceOfflineFlag = if bypassCache then \"--offline\" else \"--registry http://www.example.com\";\n    in\n    ''\n        # Pinpoint the versions of all dependencies to the ones that are actually being used\n        echo \"pinpointing versions of dependencies...\"\n        source 
$pinpointDependenciesScriptPath\n\n        # Patch the shebangs of the bundled modules to prevent them from\n        # calling executables outside the Nix store as much as possible\n        patchShebangs .\n\n        # Deploy the Node.js package by running npm install. Since the\n        # dependencies have been provided already by ourselves, it should not\n        # attempt to install them again, which is good, because we want to make\n        # it Nix's responsibility. If it needs to install any dependencies\n        # anyway (e.g. because the dependency parameters are\n        # incomplete/incorrect), it fails.\n        #\n        # The other responsibilities of NPM are kept -- version checks, build\n        # steps, postprocessing etc.\n\n        export HOME=$TMPDIR\n        cd \"${packageName}\"\n        runHook preRebuild\n\n        ${lib.optionalString bypassCache ''\n          ${lib.optionalString reconstructLock ''\n            if [ -f package-lock.json ]\n            then\n                echo \"WARNING: Reconstruct lock option enabled, but a lock file already exists!\"\n                echo \"This will most likely result in version mismatches! 
We will remove the lock file and regenerate it!\"\n                rm package-lock.json\n            else\n                echo \"No package-lock.json file found, reconstructing...\"\n            fi\n\n            node ${reconstructPackageLock}\n          ''}\n\n          node ${addIntegrityFieldsScript}\n        ''}\n\n        npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production \"--production\"} rebuild\n\n        if [ \"''${dontNpmInstall-}\" != \"1\" ]\n        then\n            # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.\n            rm -f npm-shrinkwrap.json\n\n            npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production \"--production\"} install\n        fi\n    '';\n\n  # Builds and composes an NPM package including all its dependencies\n  buildNodePackage =\n    { name\n    , packageName\n    , version\n    , dependencies ? []\n    , buildInputs ? []\n    , production ? true\n    , npmFlags ? \"\"\n    , dontNpmInstall ? false\n    , bypassCache ? false\n    , reconstructLock ? false\n    , preRebuild ? \"\"\n    , dontStrip ? true\n    , unpackPhase ? \"true\"\n    , buildPhase ? \"true\"\n    , ... 
}@args:\n\n    let\n      extraArgs = removeAttrs args [ \"name\" \"dependencies\" \"buildInputs\" \"dontStrip\" \"dontNpmInstall\" \"preRebuild\" \"unpackPhase\" \"buildPhase\" ];\n    in\n    stdenv.mkDerivation ({\n      name = \"node_${name}-${version}\";\n      buildInputs = [ tarWrapper python nodejs ]\n        ++ lib.optional (stdenv.isLinux) utillinux\n        ++ lib.optional (stdenv.isDarwin) libtool\n        ++ buildInputs;\n\n      inherit nodejs;\n\n      inherit dontStrip; # Stripping may fail a build for some package deployments\n      inherit dontNpmInstall preRebuild unpackPhase buildPhase;\n\n      compositionScript = composePackage args;\n      pinpointDependenciesScript = pinpointDependenciesOfPackage args;\n\n      passAsFile = [ \"compositionScript\" \"pinpointDependenciesScript\" ];\n\n      installPhase = ''\n        # Create and enter a root node_modules/ folder\n        mkdir -p $out/lib/node_modules\n        cd $out/lib/node_modules\n\n        # Compose the package and all its dependencies\n        source $compositionScriptPath\n\n        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}\n\n        # Create symlink to the deployed executable folder, if applicable\n        if [ -d \"$out/lib/node_modules/.bin\" ]\n        then\n            ln -s $out/lib/node_modules/.bin $out/bin\n        fi\n\n        # Create symlinks to the deployed manual page folders, if applicable\n        if [ -d \"$out/lib/node_modules/${packageName}/man\" ]\n        then\n            mkdir -p $out/share\n            for dir in \"$out/lib/node_modules/${packageName}/man/\"*\n            do\n                mkdir -p $out/share/man/$(basename \"$dir\")\n                for page in \"$dir\"/*\n                do\n                    ln -s $page $out/share/man/$(basename \"$dir\")\n                done\n            done\n        fi\n\n        # Run post install hook, if provided\n        runHook postInstall\n      '';\n    
} // extraArgs);\n\n  # Builds a node environment (a node_modules folder and a set of binaries)\n  buildNodeDependencies =\n    { name\n    , packageName\n    , version\n    , src\n    , dependencies ? []\n    , buildInputs ? []\n    , production ? true\n    , npmFlags ? \"\"\n    , dontNpmInstall ? false\n    , bypassCache ? false\n    , reconstructLock ? false\n    , dontStrip ? true\n    , unpackPhase ? \"true\"\n    , buildPhase ? \"true\"\n    , ... }@args:\n\n    let\n      extraArgs = removeAttrs args [ \"name\" \"dependencies\" \"buildInputs\" ];\n    in\n      stdenv.mkDerivation ({\n        name = \"node-dependencies-${name}-${version}\";\n\n        buildInputs = [ tarWrapper python nodejs ]\n          ++ lib.optional (stdenv.isLinux) utillinux\n          ++ lib.optional (stdenv.isDarwin) libtool\n          ++ buildInputs;\n\n        inherit dontStrip; # Stripping may fail a build for some package deployments\n        inherit dontNpmInstall unpackPhase buildPhase;\n\n        includeScript = includeDependencies { inherit dependencies; };\n        pinpointDependenciesScript = pinpointDependenciesOfPackage args;\n\n        passAsFile = [ \"includeScript\" \"pinpointDependenciesScript\" ];\n\n        installPhase = ''\n          mkdir -p $out/${packageName}\n          cd $out/${packageName}\n\n          source $includeScriptPath\n\n          # Create fake package.json to make the npm commands work properly\n          cp ${src}/package.json .\n          chmod 644 package.json\n          ${lib.optionalString bypassCache ''\n            if [ -f ${src}/package-lock.json ]\n            then\n                cp ${src}/package-lock.json .\n            fi\n          ''}\n\n          # Go to the parent folder to make sure that all packages are pinpointed\n          cd ..\n          ${lib.optionalString (builtins.substring 0 1 packageName == \"@\") \"cd ..\"}\n\n          ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; 
}}\n\n          # Expose the executables that were installed\n          cd ..\n          ${lib.optionalString (builtins.substring 0 1 packageName == \"@\") \"cd ..\"}\n\n          mv ${packageName} lib\n          ln -s $out/lib/node_modules/.bin $out/bin\n        '';\n      } // extraArgs);\n\n  # Builds a development shell\n  buildNodeShell =\n    { name\n    , packageName\n    , version\n    , src\n    , dependencies ? []\n    , buildInputs ? []\n    , production ? true\n    , npmFlags ? \"\"\n    , dontNpmInstall ? false\n    , bypassCache ? false\n    , reconstructLock ? false\n    , dontStrip ? true\n    , unpackPhase ? \"true\"\n    , buildPhase ? \"true\"\n    , ... }@args:\n\n    let\n      nodeDependencies = buildNodeDependencies args;\n    in\n    stdenv.mkDerivation {\n      name = \"node-shell-${name}-${version}\";\n\n      buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;\n      buildCommand = ''\n        mkdir -p $out/bin\n        cat > $out/bin/shell <<EOF\n        #! ${stdenv.shell} -e\n        $shellHook\n        exec ${stdenv.shell}\n        EOF\n        chmod +x $out/bin/shell\n      '';\n\n      # Provide the dependencies in a development shell through the NODE_PATH environment variable\n      inherit nodeDependencies;\n      shellHook = lib.optionalString (dependencies != []) ''\n        export NODE_PATH=${nodeDependencies}/lib/node_modules\n        export PATH=\"${nodeDependencies}/bin:$PATH\"\n      '';\n    };\nin\n{\n  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;\n  buildNodePackage = lib.makeOverridable buildNodePackage;\n  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;\n  buildNodeShell = lib.makeOverridable buildNodeShell;\n}\n"
  },
  {
    "path": "pkgs/jsdoc/node-packages.nix",
    "content": "# This file has been generated by node2nix 1.9.0. Do not edit!\n\n{nodeEnv, fetchurl, fetchgit, nix-gitignore, stdenv, lib, globalBuildInputs ? []}:\n\nlet\n  sources = {\n    \"@babel/parser-7.12.15\" = {\n      name = \"_at_babel_slash_parser\";\n      packageName = \"@babel/parser\";\n      version = \"7.12.15\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/@babel/parser/-/parser-7.12.15.tgz\";\n        sha512 = \"AQBOU2Z9kWwSZMd6lNjCX0GUgFonL1wAM1db8L8PMk9UDaGsRCArBkU4Sc+UCM3AE4hjbXx+h58Lb3QT4oRmrA==\";\n      };\n    };\n    \"argparse-1.0.10\" = {\n      name = \"argparse\";\n      packageName = \"argparse\";\n      version = \"1.0.10\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz\";\n        sha512 = \"o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==\";\n      };\n    };\n    \"bluebird-3.7.2\" = {\n      name = \"bluebird\";\n      packageName = \"bluebird\";\n      version = \"3.7.2\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz\";\n        sha512 = \"XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==\";\n      };\n    };\n    \"catharsis-0.8.11\" = {\n      name = \"catharsis\";\n      packageName = \"catharsis\";\n      version = \"0.8.11\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/catharsis/-/catharsis-0.8.11.tgz\";\n        sha512 = \"a+xUyMV7hD1BrDQA/3iPV7oc+6W26BgVJO05PGEoatMyIuPScQKsde6i3YorWX1qs+AZjnJ18NqdKoCtKiNh1g==\";\n      };\n    };\n    \"entities-2.0.3\" = {\n      name = \"entities\";\n      packageName = \"entities\";\n      version = \"2.0.3\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/entities/-/entities-2.0.3.tgz\";\n        sha512 = \"MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ==\";\n      };\n    };\n    
\"escape-string-regexp-2.0.0\" = {\n      name = \"escape-string-regexp\";\n      packageName = \"escape-string-regexp\";\n      version = \"2.0.0\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz\";\n        sha512 = \"UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==\";\n      };\n    };\n    \"graceful-fs-4.2.5\" = {\n      name = \"graceful-fs\";\n      packageName = \"graceful-fs\";\n      version = \"4.2.5\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.5.tgz\";\n        sha512 = \"kBBSQbz2K0Nyn+31j/w36fUfxkBW9/gfwRWdUY1ULReH3iokVJgddZAFcD1D0xlgTmFxJCbUkUclAlc6/IDJkw==\";\n      };\n    };\n    \"js2xmlparser-4.0.1\" = {\n      name = \"js2xmlparser\";\n      packageName = \"js2xmlparser\";\n      version = \"4.0.1\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.1.tgz\";\n        sha512 = \"KrPTolcw6RocpYjdC7pL7v62e55q7qOMHvLX1UCLc5AAS8qeJ6nukarEJAF2KL2PZxlbGueEbINqZR2bDe/gUw==\";\n      };\n    };\n    \"klaw-3.0.0\" = {\n      name = \"klaw\";\n      packageName = \"klaw\";\n      version = \"3.0.0\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz\";\n        sha512 = \"0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==\";\n      };\n    };\n    \"linkify-it-2.2.0\" = {\n      name = \"linkify-it\";\n      packageName = \"linkify-it\";\n      version = \"2.2.0\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz\";\n        sha512 = \"GnAl/knGn+i1U/wjBz3akz2stz+HrHLsxMwHQGofCDfPvlf+gDKN58UtfmUquTY4/MXeE2x7k19KQmeoZi94Iw==\";\n      };\n    };\n    \"lodash-4.17.20\" = {\n      name = \"lodash\";\n      packageName = \"lodash\";\n      version = \"4.17.20\";\n      src = fetchurl {\n        url = 
\"https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz\";\n        sha512 = \"PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==\";\n      };\n    };\n    \"markdown-it-10.0.0\" = {\n      name = \"markdown-it\";\n      packageName = \"markdown-it\";\n      version = \"10.0.0\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/markdown-it/-/markdown-it-10.0.0.tgz\";\n        sha512 = \"YWOP1j7UbDNz+TumYP1kpwnP0aEa711cJjrAQrzd0UXlbJfc5aAq0F/PZHjiioqDC1NKgvIMX+o+9Bk7yuM2dg==\";\n      };\n    };\n    \"markdown-it-anchor-5.3.0\" = {\n      name = \"markdown-it-anchor\";\n      packageName = \"markdown-it-anchor\";\n      version = \"5.3.0\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-5.3.0.tgz\";\n        sha512 = \"/V1MnLL/rgJ3jkMWo84UR+K+jF1cxNG1a+KwqeXqTIJ+jtA8aWSHuigx8lTzauiIjBDbwF3NcWQMotd0Dm39jA==\";\n      };\n    };\n    \"marked-0.8.2\" = {\n      name = \"marked\";\n      packageName = \"marked\";\n      version = \"0.8.2\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/marked/-/marked-0.8.2.tgz\";\n        sha512 = \"EGwzEeCcLniFX51DhTpmTom+dSA/MG/OBUDjnWtHbEnjAH180VzUeAw+oE4+Zv+CoYBWyRlYOTR0N8SO9R1PVw==\";\n      };\n    };\n    \"mdurl-1.0.1\" = {\n      name = \"mdurl\";\n      packageName = \"mdurl\";\n      version = \"1.0.1\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz\";\n        sha1 = \"fe85b2ec75a59037f2adfec100fd6c601761152e\";\n      };\n    };\n    \"mkdirp-1.0.4\" = {\n      name = \"mkdirp\";\n      packageName = \"mkdirp\";\n      version = \"1.0.4\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz\";\n        sha512 = \"vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==\";\n      };\n    };\n    \"requizzle-0.2.3\" = {\n      name = \"requizzle\";\n      packageName 
= \"requizzle\";\n      version = \"0.2.3\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/requizzle/-/requizzle-0.2.3.tgz\";\n        sha512 = \"YanoyJjykPxGHii0fZP0uUPEXpvqfBDxWV7s6GKAiiOsiqhX6vHNyW3Qzdmqp/iq/ExbhaGbVrjB4ruEVSM4GQ==\";\n      };\n    };\n    \"sprintf-js-1.0.3\" = {\n      name = \"sprintf-js\";\n      packageName = \"sprintf-js\";\n      version = \"1.0.3\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz\";\n        sha1 = \"04e6926f662895354f3dd015203633b857297e2c\";\n      };\n    };\n    \"strip-json-comments-3.1.1\" = {\n      name = \"strip-json-comments\";\n      packageName = \"strip-json-comments\";\n      version = \"3.1.1\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz\";\n        sha512 = \"6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==\";\n      };\n    };\n    \"taffydb-2.6.2\" = {\n      name = \"taffydb\";\n      packageName = \"taffydb\";\n      version = \"2.6.2\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/taffydb/-/taffydb-2.6.2.tgz\";\n        sha1 = \"7cbcb64b5a141b6a2efc2c5d2c67b4e150b2a268\";\n      };\n    };\n    \"uc.micro-1.0.6\" = {\n      name = \"uc.micro\";\n      packageName = \"uc.micro\";\n      version = \"1.0.6\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz\";\n        sha512 = \"8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==\";\n      };\n    };\n    \"underscore-1.10.2\" = {\n      name = \"underscore\";\n      packageName = \"underscore\";\n      version = \"1.10.2\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/underscore/-/underscore-1.10.2.tgz\";\n        sha512 = \"N4P+Q/BuyuEKFJ43B9gYuOj4TQUHXX+j2FqguVOpjkssLUUrnJofCcBccJSCoeturDoZU6GorDTHSvUDlSQbTg==\";\n      };\n    
};\n    \"xmlcreate-2.0.3\" = {\n      name = \"xmlcreate\";\n      packageName = \"xmlcreate\";\n      version = \"2.0.3\";\n      src = fetchurl {\n        url = \"https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.3.tgz\";\n        sha512 = \"HgS+X6zAztGa9zIK3Y3LXuJes33Lz9x+YyTxgrkIdabu2vqcGOWwdfCpf1hWLRrd553wd4QCDf6BBO6FfdsRiQ==\";\n      };\n    };\n  };\nin\n{\n  jsdoc = nodeEnv.buildNodePackage {\n    name = \"jsdoc\";\n    packageName = \"jsdoc\";\n    version = \"3.6.6\";\n    src = fetchurl {\n      url = \"https://registry.npmjs.org/jsdoc/-/jsdoc-3.6.6.tgz\";\n      sha512 = \"znR99e1BHeyEkSvgDDpX0sTiTu+8aQyDl9DawrkOGZTTW8hv0deIFXx87114zJ7gRaDZKVQD/4tr1ifmJp9xhQ==\";\n    };\n    dependencies = [\n      sources.\"@babel/parser-7.12.15\"\n      sources.\"argparse-1.0.10\"\n      sources.\"bluebird-3.7.2\"\n      sources.\"catharsis-0.8.11\"\n      sources.\"entities-2.0.3\"\n      sources.\"escape-string-regexp-2.0.0\"\n      sources.\"graceful-fs-4.2.5\"\n      sources.\"js2xmlparser-4.0.1\"\n      sources.\"klaw-3.0.0\"\n      sources.\"linkify-it-2.2.0\"\n      sources.\"lodash-4.17.20\"\n      sources.\"markdown-it-10.0.0\"\n      sources.\"markdown-it-anchor-5.3.0\"\n      sources.\"marked-0.8.2\"\n      sources.\"mdurl-1.0.1\"\n      sources.\"mkdirp-1.0.4\"\n      sources.\"requizzle-0.2.3\"\n      sources.\"sprintf-js-1.0.3\"\n      sources.\"strip-json-comments-3.1.1\"\n      sources.\"taffydb-2.6.2\"\n      sources.\"uc.micro-1.0.6\"\n      sources.\"underscore-1.10.2\"\n      sources.\"xmlcreate-2.0.3\"\n    ];\n    buildInputs = globalBuildInputs;\n    meta = {\n      description = \"An API documentation generator for JavaScript.\";\n      homepage = \"https://github.com/jsdoc/jsdoc#readme\";\n      license = \"Apache-2.0\";\n    };\n    production = true;\n    bypassCache = true;\n    reconstructLock = true;\n  };\n}\n"
  },
  {
    "path": "pkgs/jsdoc/package.json",
    "content": "[\"jsdoc\"]\n"
  },
  {
    "path": "pkgs/lib/default.nix",
    "content": "{ pkgs }:\n\nlet\n  update = import ./update.nix { inherit pkgs; };\nin\n  { inherit update; }\n  // update\n"
  },
  {
    "path": "pkgs/lib/update.nix",
    "content": "{ pkgs }:\n\nlet\n  inherit (pkgs) cacert nix jq curl gnused gnugrep coreutils;\nin {\n\n  updateFromGitHub = { owner, repo, path, branch }: ''\n    export SSL_CERT_FILE=${cacert}/etc/ssl/certs/ca-bundle.crt\n\n    github_rev() {\n      ${curl.bin}/bin/curl -sSf \"https://api.github.com/repos/$1/$2/branches/$3\" | \\\n        ${jq}/bin/jq '.commit.sha' | \\\n        ${gnused}/bin/sed 's/\"//g'\n    }\n\n    github_sha256() {\n      ${nix}/bin/nix-prefetch-url \\\n         --unpack \\\n         --type sha256 \\\n         \"https://github.com/$1/$2/archive/$3.tar.gz\" 2>&1 | \\\n         tail -1\n    }\n\n    echo \"=== ${owner}/${repo}@${branch} ===\"\n\n    echo -n \"Looking up latest revision ... \"\n    rev=$(github_rev \"${owner}\" \"${repo}\" \"${branch}\");\n    echo \"revision is \\`$rev\\`.\"\n\n    sha256=$(github_sha256 \"${owner}\" \"${repo}\" \"$rev\");\n    echo \"sha256 is \\`$sha256\\`.\"\n\n    if [ \"$sha256\" == \"\" ]; then\n      echo \"sha256 is not valid!\"\n      exit 2\n    fi\n    source_file=${path}\n    echo \"Content of source file (``$source_file``) written.\"\n    cat <<REPO | ${coreutils}/bin/tee \"$source_file\"\n    {\n      \"owner\": \"${owner}\",\n      \"repo\": \"${repo}\",\n      \"rev\": \"$rev\",\n      \"sha256\": \"$sha256\"\n    }\n    REPO\n    echo\n  '';\n\n}\n"
  },
  {
    "path": "pkgs/nixpkgs.json",
    "content": "{\n  \"owner\": \"NixOS\",\n  \"repo\": \"nixpkgs-channels\",\n  \"rev\": \"ed070354a9e307fdf20a94cb2af749738562385d\",\n  \"sha256\": \"05pqwg7s4w34v99yykb27031kc21x4n3f33szdi6wv11k4asjyfp\"\n}\n"
  },
  {
    "path": "pkgs/phlay/default.nix",
    "content": "{ fetchFromGitHub\n, python3Packages\n}:\npython3Packages.buildPythonApplication rec {\n  name = \"phlay-${version}\";\n  version = \"0.2.3\";\n  src = fetchFromGitHub {\n    owner = \"mystor\";\n    repo = \"phlay\";\n    rev = \"98fcbead18c785db24a4b62fad4a8a525b81f8e1\";\n    sha256 = \"1m5c7lq12pgcaab4xrifzi0axaxpx24kb9x2f017pb5ni7lbcg3s\";\n  };\n  meta = {\n    description = \"A command-line interface for Phabricator\";\n    longDescription = ''\n      Phlay is an alternative to Arcanist for submitting changes to Phabricator.\n\n      You might like Phlay if you do Mozilla development using git and\n      a \"commit series\" workflow.\n    '';\n  };\n  # phlay is designed as a single-file Python script with no\n  # dependencies outside the stdlib.\n  format = \"other\";\n  installPhase = \"mkdir -p $out/bin; cp phlay $out/bin\";\n}\n"
  },
  {
    "path": "pkgs/servo/default.nix",
    "content": "{ servoSrc ? null\n, lib\n, rustPlatform\n, pkgs\n}:\n\nlet\n\n  inherit (lib) updateFromGitHub;\n  inherit (pkgs) fetchFromGitHub curl dbus fontconfig freeglut freetype\n    gperf libxmi llvm mesa mesa_glu openssl pkgconfig makeWrapper writeText\n    xorg;\n  inherit (pkgs.stdenv) mkDerivation;\n  inherit (pkgs.lib) importJSON;\n  inherit (rustPlatform) buildRustPackage;\n  inherit (rustPlatform.rust) rustc cargo;\n\n  pythonPackages = pkgs.python3Packages;\n\n  src =\n    if servoSrc == null then\n      fetchFromGitHub (importJSON ./source.json)\n    else\n      servoSrc;\n\n  # TODO: figure out version from servoSrc\n  version = \"latest\";\n\n  # TODO: add possibility to test against wayland\n  xorgCompositorLibs = \"${xorg.libXcursor.out}/lib:${xorg.libXi.out}/lib\";\n\n  servobuild = writeText \"servobuild\" ''\n    [tools]\n    cache-dir = \"./downloads\"\n    cargo-home-dir = \"./.downloads/clones\"\n    system-rust = true\n    rust-root = \"${rustc}/bin/rustc\"\n    system-cargo = true\n    cargo-root = \"${cargo}/bin/cargo\"\n    [build]\n  '';\n\n  servoRust = buildRustPackage rec {\n    inherit src;\n    name = \"servo-rust-${version}\";\n    postUnpack = ''\n      pwd\n      ls -la \n      exit 100\n    '';\n    sourceRoot = \"servo/components/servo\";\n\n    depsSha256 = \"0ca0lc8mm8kczll5m03n5fwsr0540c2xbfi4nn9ksn0s4sap50yn\";\n\n    doCheck = false;\n  };\n\nin mkDerivation rec {\n  name = \"servo-${version}\";\n  src = servoSrc;\n\n  buildInputs = [\n    #cmake\n    curl\n    dbus\n    fontconfig\n    freeglut\n    freetype\n    gperf\n    libxmi\n    llvm\n    mesa\n    mesa_glu\n    openssl\n    pkgconfig\n    pythonPackages.pip\n    pythonPackages.virtualenv\n    xorg.libX11\n    xorg.libXmu\n\n    # nix stuff\n    makeWrapper\n    servoRust\n  ];\n  preConfigure = ''\n    ln -s ${servobuild} .servobuild\n  '';\n  postInstall = ''\n    wrapProgram \"$out/bin/servo\" --prefix LD_LIBRARY_PATH : \"${xorgCompositorLibs}\"\n  '';\n  
shellHook = ''\n    # Servo tries to switch between libX11 and wayland at runtime so we have\n    # to provide a path\n    export LD_LIBRARY_PATH=${xorgCompositorLibs}:$LD_LIBRARY_PATH\n  '';\n  passthru.updateScript = updateFromGitHub {\n    owner = \"servo\";\n    repo = \"servo\";\n    branch = \"master\";\n    path = \"pkgs/servo/source.json\";\n  };\n}\n"
  },
  {
    "path": "release.nix",
    "content": "# To pin a specific version of nixpkgs, change the nixpkgsSrc argument.\n{ nixpkgsSrc ? <nixpkgs>\n, supportedSystems ? [ \"x86_64-linux\" \"i686-linux\" /* \"x86_64-darwin\" */\n    \"aarch64-linux\"\n  ]\n}:\n\nlet\n  lib = (import nixpkgsSrc {}).lib;\n\n  # Make an attribute set for each system, the builder is then specialized to\n  # use the selected system.\n  forEachSystem = systems: builder /* system -> stdenv -> pkgs */:\n    lib.genAttrs systems builder;\n\n  # Make an attribute set for each compiler, the builder is then be specialized\n  # to use the selected compiler.\n  forEachCompiler = compilers: builder: system:\n    builtins.listToAttrs (map (compiler: {\n      name = compiler;\n      value = builder compiler system;\n    }) compilers);\n\n\n  # Overide the previous derivation, with a different stdenv.\n  builder = path: compiler: system:\n    lib.getAttrFromPath path (import nixpkgsSrc {\n      inherit system;\n      overlays = [\n        # Add all packages from nixpkgs-mozilla.\n        (import ./default.nix)\n\n        # Define customStdenvs, which is a set of various compilers which can be\n        # used to compile the given package against.\n        (import ./compilers-overlay.nix)\n\n        # Use the following overlay to override the requested package from\n        # nixpkgs, with a custom stdenv taken from the compilers-overlay.\n        (self: super:\n          if compiler == null then {}\n          else lib.setAttrByPath path ((lib.getAttrFromPath path super).override {\n            stdenv = self.customStdenvs.\"${compiler}\";\n          }))\n      ];\n    });\n\n  build = path: { systems ? supportedSystems, compilers ? 
null }:\n    forEachSystem systems (\n      if compilers == null\n      then builder path null\n      else forEachCompiler compilers (builder path)\n    );\n\n  geckoCompilers = [\n    \"clang\"\n    \"clang36\"\n    \"clang37\"\n    \"clang38\"\n    \"clang5\"\n    \"clang6\"\n    \"clang7\"\n    \"clang12\"\n    \"clang13\"\n    \"gcc\"\n    \"gcc6\"\n    \"gcc5\"\n    \"gcc49\"\n    \"gcc48\"\n    #\"gcc474\"\n    #\"gcc473\"\n    #\"gcc472\"\n  ];\n\n  jobs = {\n\n    # For each system, and each compiler, create an attribute with the name of\n    # the system and compiler. Use this attribute name to select which\n    # environment you are interested in for building firefox.  These can be\n    # built using the following command:\n    #\n    #   $ nix-build release.nix -A gecko.x86_64-linux.clang -o firefox-x64\n    #   $ nix-build release.nix -A gecko.i686-linux.gcc48 -o firefox-x86\n    #\n    # If you are only interested in getting a build environment, then use the\n    # nix-shell command instead, which will skip the copy of Firefox sources,\n    # and pull the dependencies needed for building firefox with this\n    # environment.\n    #\n    #   $ nix-shell release.nix -A gecko.i686-linux.gcc --pure --command '$CC --version'\n    #   $ nix-shell release.nix -A gecko.x86_64-linux.clang --pure\n    #\n    # As some of the test scripts of Gecko are checking against absolute path, a\n    # fake-FHS is provided for Gecko.  It can be accessed by appending\n    # \".fhs.env\" behind the previous commands:\n    #\n    #   $ nix-shell release.nix -A gecko.x86_64-linux.gcc.fhs.env\n    #\n    # Which will spawn a new shell where the closure of everything used to build\n    # Gecko would be part of the fake-root.\n    gecko = build [ \"devEnv\" \"gecko\" ] { compilers = geckoCompilers; };\n    latest = {\n      \"firefox-nightly-bin\" = build [ \"latest\" \"firefox-nightly-bin\" ];\n    };\n\n    git-cinnabar = build [ \"git-cinnabar\" ];\n  };\n\nin jobs\n"
  },
  {
    "path": "rust-overlay-install.sh",
    "content": "#!/bin/sh -e\n\ncd \"$(dirname \"$0\")\" || exit\n\noverlay_dir=$HOME/.config/nixpkgs/overlays\nname=rust-overlay.nix\n\necho Installing $name as an overlay\n\nset -x\nmkdir -p \"$overlay_dir\"\nln -s \"$PWD/$name\" \"$overlay_dir/$name\"\n"
  },
  {
    "path": "rust-overlay.nix",
    "content": "# This file provide a Rust overlay, which provides pre-packaged bleeding edge versions of rustc\n# and cargo.\nself: super:\n\nlet\n  fromTOML =\n    # nix 2.1 added the fromTOML builtin\n    if builtins ? fromTOML\n    then builtins.fromTOML\n    else (import ./lib/parseTOML.nix).fromTOML;\n\n  parseRustToolchain = file: with builtins;\n    if file == null then\n      { }\n    # Parse *.toml files as TOML\n    else if self.lib.strings.hasSuffix \".toml\" file then\n      ({ channel ? null, date ? null, ... }: { inherit channel date; })\n        (fromTOML (readFile file)).toolchain\n    else\n    # Otherwise, assume the file contains just a rust version string\n      let\n        str = readFile file;\n        # Match toolchain descriptions of type \"nightly\" or \"nightly-2020-01-01\"\n        channel_by_name = match \"([a-z]+)(-([0-9]{4}-[0-9]{2}-[0-9]{2}))?.*\" str;\n        # Match toolchain descriptions of type \"1.34.0\" or \"1.34.0-2019-04-10\"\n        channel_by_version = match \"([0-9]+\\\\.[0-9]+\\\\.[0-9]+)(-([0-9]{4}-[0-9]{2}-[0-9]{2}))?.*\" str;\n      in\n      (x: { channel = head x; date = (head (tail (tail x))); }) (\n        if channel_by_name != null then\n          channel_by_name\n        else\n          channel_by_version\n      );\n\n  # In NixOS 24.11, the `pkgs.rust.toRustTarget` has become deprecated in favor of the\n  # `.rust.rustcTarget` attribute of the platform. This function provides backwards compatibility in\n  # case the caller is using a nixpkgs older than NixOS 24.11.\n  toRustTargetCompat = platform:\n    if platform ? rust && platform.rust ? rustcTarget\n    then platform.rust.rustcTarget\n    else super.rust.toRustTarget platform;\n\n  # See https://github.com/rust-lang-nursery/rustup.rs/blob/master/src/dist/src/dist.rs\n  defaultDistRoot = \"https://static.rust-lang.org\";\n  manifest_v1_url = {\n    dist_root ? defaultDistRoot + \"/dist\",\n    date ? null,\n    staging ? 
false,\n    # A channel can be \"nightly\", \"beta\", \"stable\", or \"\\d{1}\\.\\d{1,3}\\.\\d{1,2}\".\n    channel ? \"nightly\",\n    # A path that points to a rust-toolchain file, typically ./rust-toolchain.\n    rustToolchain ? null,\n    ...\n  }:\n    let args = { inherit channel date; } // parseRustToolchain rustToolchain; in\n    let inherit (args) date channel; in\n    if date == null && staging == false\n    then \"${dist_root}/channel-rust-${channel}\"\n    else if date != null && staging == false\n    then \"${dist_root}/${date}/channel-rust-${channel}\"\n    else if date == null && staging == true\n    then \"${dist_root}/staging/channel-rust-${channel}\"\n    else throw \"not a real-world case\";\n\n  manifest_v2_url = args: (manifest_v1_url args) + \".toml\";\n\n  getComponentsWithFixedPlatform = pkgs: pkgname: stdenv:\n    let\n      pkg = pkgs.${pkgname};\n      srcInfo = pkg.target.${toRustTargetCompat stdenv.targetPlatform} or pkg.target.\"*\";\n      components = srcInfo.components or [];\n      componentNamesList =\n        builtins.map (pkg: pkg.pkg) (builtins.filter (pkg: (pkg.target != \"*\")) components);\n    in\n      componentNamesList;\n\n  getExtensions = pkgs: pkgname: stdenv:\n    let\n      inherit (super.lib) unique;\n      pkg = pkgs.${pkgname};\n      srcInfo = pkg.target.${toRustTargetCompat stdenv.targetPlatform} or pkg.target.\"*\";\n      extensions = srcInfo.extensions or [];\n      extensionNamesList = unique (builtins.map (pkg: pkg.pkg) extensions);\n    in\n      extensionNamesList;\n\n  hasTarget = pkgs: pkgname: target:\n    pkgs ? 
${pkgname}.target.${target};\n\n  getTuples = pkgs: name: targets:\n    builtins.map (target: { inherit name target; }) (builtins.filter (target: hasTarget pkgs name target) targets);\n\n  # In the manifest, a package might have different components which are bundled with it, as opposed as the extensions which can be added.\n  # By default, a package will include the components for the same architecture, and offers them as extensions for other architectures.\n  #\n  # This functions returns a list of { name, target } attribute sets, which includes the current system package, and all its components for the selected targets.\n  # The list contains the package for the pkgTargets as well as the packages for components for all compTargets\n  getTargetPkgTuples = pkgs: pkgname: pkgTargets: compTargets: stdenv:\n    let\n      inherit (builtins) elem;\n      inherit (super.lib) intersectLists;\n      components = getComponentsWithFixedPlatform pkgs pkgname stdenv;\n      extensions = getExtensions pkgs pkgname stdenv;\n      compExtIntersect = intersectLists components extensions;\n      tuples = (getTuples pkgs pkgname pkgTargets) ++ (builtins.map (name: getTuples pkgs name compTargets) compExtIntersect);\n    in\n      tuples;\n\n  getFetchUrl = pkgs: pkgname: target: stdenv: fetchurl:\n    let\n      pkg = pkgs.${pkgname};\n      srcInfo = pkg.target.${target};\n    in\n      (super.fetchurl { url = srcInfo.xz_url or srcInfo.url; sha256 = srcInfo.xz_hash or srcInfo.hash; });\n\n  checkMissingExtensions = pkgs: pkgname: stdenv: extensions:\n    let\n      inherit (builtins) head;\n      inherit (super.lib) concatStringsSep subtractLists;\n      availableExtensions = getExtensions pkgs pkgname stdenv;\n      missingExtensions = subtractLists availableExtensions extensions;\n      extensionsToInstall =\n        if missingExtensions == [] then extensions else throw ''\n          While compiling ${pkgname}: the extension ${head missingExtensions} is not available.\n         
 Select extensions from the following list:\n          ${concatStringsSep \"\\n\" availableExtensions}'';\n    in\n      extensionsToInstall;\n\n  getComponents = pkgs: pkgname: targets: extensions: targetExtensions: stdenv: fetchurl:\n    let\n      inherit (builtins) head map;\n      inherit (super.lib) flatten remove subtractLists unique;\n      targetExtensionsToInstall = checkMissingExtensions pkgs pkgname stdenv targetExtensions;\n      extensionsToInstall = checkMissingExtensions pkgs pkgname stdenv extensions;\n      hostTargets = [ \"*\" (toRustTargetCompat stdenv.hostPlatform) (toRustTargetCompat stdenv.targetPlatform) ];\n      pkgTuples = flatten (getTargetPkgTuples pkgs pkgname hostTargets targets stdenv);\n      extensionTuples = flatten (map (name: getTargetPkgTuples pkgs name hostTargets targets stdenv) extensionsToInstall);\n      targetExtensionTuples = flatten (map (name: getTargetPkgTuples pkgs name targets targets stdenv) targetExtensionsToInstall);\n      pkgsTuples = pkgTuples ++ extensionTuples ++ targetExtensionTuples;\n      missingTargets = subtractLists (map (tuple: tuple.target) pkgsTuples) (remove \"*\" targets);\n      pkgsTuplesToInstall =\n        if missingTargets == [] then pkgsTuples else throw ''\n          While compiling ${pkgname}: the target ${head missingTargets} is not available for any package.'';\n    in\n      map (tuple: { name = tuple.name; src = (getFetchUrl pkgs tuple.name tuple.target stdenv fetchurl); }) pkgsTuplesToInstall;\n\n  installComponents = stdenv: namesAndSrcs:\n    let\n      inherit (builtins) map;\n      installComponent = name: src:\n        stdenv.mkDerivation {\n          inherit name;\n          inherit src;\n\n          # No point copying src to a build server, then copying back the\n          # entire unpacked contents after just a little twiddling.\n          preferLocalBuild = true;\n\n          # (@nbp) TODO: Check on Windows and Mac.\n          # This code is inspired by 
patchelf/setup-hook.sh to iterate over all binaries.\n          installPhase = ''\n            patchShebangs install.sh\n            CFG_DISABLE_LDCONFIG=1 ./install.sh --prefix=$out --verbose\n\n            setInterpreter() {\n              local dir=\"$1\"\n              [ -e \"$dir\" ] || return 0\n\n              echo \"Patching interpreter of ELF executables and libraries in $dir\"\n              local i\n              while IFS= read -r -d ''$'\\0' i; do\n                if [[ \"$i\" =~ .build-id ]]; then continue; fi\n                if ! isELF \"$i\"; then continue; fi\n                echo \"setting interpreter of $i\"\n\n                if [[ -x \"$i\" ]]; then\n                  # Handle executables\n                  patchelf \\\n                    --set-interpreter \"$(cat $NIX_CC/nix-support/dynamic-linker)\" \\\n                    --set-rpath \"${super.lib.makeLibraryPath [ self.zlib ]}:$out/lib\" \\\n                    \"$i\" || true\n                else\n                  # Handle libraries\n                  patchelf \\\n                    --set-rpath \"${super.lib.makeLibraryPath [ self.zlib ]}:$out/lib\" \\\n                    \"$i\" || true\n                fi\n              done < <(find \"$dir\" -type f -print0)\n            }\n\n            setInterpreter $out\n          '';\n\n          postFixup = ''\n            # Function moves well-known files from etc/\n            handleEtc() {\n              local oldIFS=\"$IFS\"\n\n              # Directories we are aware of, given as substitution lists\n              for paths in \\\n                \"etc/bash_completion.d\",\"share/bash_completion/completions\",\"etc/bash_completions.d\",\"share/bash_completions/completions\";\n                do\n                # Some directoties may be missing in some versions. If so we just skip them.\n                # See https://github.com/mozilla/nixpkgs-mozilla/issues/48 for more infomation.\n                if [ ! 
-e $paths ]; then continue; fi\n\n                IFS=\",\"\n                set -- $paths\n                IFS=\"$oldIFS\"\n\n                local orig_path=\"$1\"\n                local wanted_path=\"$2\"\n\n                # Rename the files\n                if [ -d ./\"$orig_path\" ]; then\n                  mkdir -p \"$(dirname ./\"$wanted_path\")\"\n                fi\n                mv -v ./\"$orig_path\" ./\"$wanted_path\"\n\n                # Fail explicitly if etc is not empty so we can add it to the list and/or report it upstream\n                rmdir ./etc || {\n                  echo Installer tries to install to /etc:\n                  find ./etc\n                  exit 1\n                }\n              done\n            }\n\n            if [ -d \"$out\"/etc ]; then\n              pushd \"$out\"\n              handleEtc\n              popd\n            fi\n          '';\n\n          dontStrip = true;\n        };\n    in\n      map (nameAndSrc: (installComponent nameAndSrc.name nameAndSrc.src)) namesAndSrcs;\n\n  # Manifest files are organized as follow:\n  # { date = \"2017-03-03\";\n  #   pkg.cargo.version= \"0.18.0-nightly (5db6d64 2017-03-03)\";\n  #   pkg.cargo.target.x86_64-unknown-linux-gnu = {\n  #     available = true;\n  #     hash = \"abce...\"; # sha256\n  #     url = \"https://static.rust-lang.org/dist/....tar.gz\";\n  #     xz_hash = \"abce...\"; # sha256\n  #     xz_url = \"https://static.rust-lang.org/dist/....tar.xz\";\n  #   };\n  # }\n  #\n  # The packages available usually are:\n  #   cargo, rust-analysis, rust-docs, rust-src, rust-std, rustc, and\n  #   rust, which aggregates them in one package.\n  #\n  # For each package the following options are available:\n  #   extensions        - The extensions that should be installed for the package.\n  #                       For example, install the package rust and add the extension rust-src.\n  #   targets           - The package will always be installed for the host system, but 
with this option\n  #                       extra targets can be specified, e.g. \"mips-unknown-linux-musl\". The target\n  #                       will only apply to components of the package that support being installed for\n  #                       a different architecture. For example, the rust package will install rust-std\n  #                       for the host system and the targets.\n  #   targetExtensions  - If you want to force extensions to be installed for the given targets, this is your option.\n  #                       All extensions in this list will be installed for the target architectures.\n  #                       *Attention* If you want to install an extension like rust-src, that has no fixed architecture (arch *),\n  #                       you will need to specify this extension in the extensions options or it will not be installed!\n  fromManifestFile = manifest: { stdenv, lib, fetchurl, patchelf }:\n    let\n      inherit (builtins) elemAt;\n      inherit (super) makeOverridable;\n      inherit (super.lib) flip mapAttrs;\n      pkgs = fromTOML (builtins.readFile manifest);\n    in\n    flip mapAttrs pkgs.pkg (name: pkg:\n      makeOverridable ({extensions, targets, targetExtensions}:\n        let\n          version' = builtins.match \"([^ ]*) [(]([^ ]*) ([^ ]*)[)]\" pkg.version;\n          version = \"${elemAt version' 0}-${elemAt version' 2}-${elemAt version' 1}\";\n          namesAndSrcs = getComponents pkgs.pkg name targets extensions targetExtensions stdenv fetchurl;\n          components = installComponents stdenv namesAndSrcs;\n          componentsOuts = builtins.map (comp: (super.lib.strings.escapeNixString (super.lib.getOutput \"out\" comp))) components;\n        in\n          super.pkgs.symlinkJoin {\n            name = name + \"-\" + version;\n            paths = components;\n            postBuild = ''\n              # If rustc or rustdoc is in the derivation, we need to copy their\n              # executable into the final 
derivation. This is required\n              # for making them find the correct SYSROOT.\n              # Similarly, we copy the python files for gdb pretty-printers since\n              # its auto-load-safe-path mechanism doesn't like symlinked files.\n              for target in $out/bin/{rustc,rustdoc} $out/lib/rustlib/etc/*.py; do\n                if [ -e $target ]; then\n                  cp --remove-destination \"$(realpath -e $target)\" $target\n\n                  # The SYSROOT is determined by using the librustc_driver-*.so.\n                  # So, we need to point to the *.so files in our derivation.\n                  chmod u+w $target\n                  patchelf --set-rpath \"$out/lib\" $target || true\n                fi\n              done\n\n              # Here we copy the librustc_driver-*.so to our derivation.\n              # The SYSROOT is determined based on the path of this library.\n              if test \"\" != $out/lib/librustc_driver-*.so &> /dev/null; then\n                RUSTC_DRIVER_PATH=$(realpath -e $out/lib/librustc_driver-*.so)\n                rm $out/lib/librustc_driver-*.so\n                cp $RUSTC_DRIVER_PATH $out/lib/\n              fi\n            '';\n\n            # Export the manifest file as part of the nix-support files such\n            # that one can compute the sha256 of a manifest to freeze it for\n            # reproducible builds.\n            MANIFEST_FILE = manifest;\n            postInstall = ''\n              mkdir $out/nix-support\n              cp $MANIFEST_FILE $out/nix-support/manifest.toml\n            '';\n\n            # Add the compiler as part of the propagated build inputs in order\n            # to run:\n            #\n            #    $ nix-shell -p rustChannels.stable.rust\n            #\n            # And get a fully working Rust compiler, with the stdenv linker.\n            propagatedBuildInputs = [ stdenv.cc ];\n\n            meta.platforms = lib.platforms.all;\n          }\n      ) { 
extensions = []; targets = []; targetExtensions = []; }\n    );\n\n  fromManifest = sha256: manifest: { stdenv, lib, fetchurl, patchelf }:\n    let manifestFile = if sha256 == null then builtins.fetchurl manifest else fetchurl { url = manifest; inherit sha256; };\n    in fromManifestFile manifestFile { inherit stdenv lib fetchurl patchelf; };\n\nin\n\nrec {\n  lib = super.lib // {\n    inherit fromTOML;\n    rustLib = {\n      inherit fromManifest fromManifestFile manifest_v2_url;\n    };\n  };\n\n  rustChannelOf = { sha256 ? null, ... } @ manifest_args: fromManifest\n    sha256 (manifest_v2_url manifest_args)\n    { inherit (super) lib;\n      inherit (self) stdenv fetchurl patchelf;\n    } ;\n\n  # Set of packages which are automagically updated. Do not rely on these for\n  # reproducible builds.\n  latest = (super.latest or {}) // {\n    rustChannels = {\n      nightly = rustChannelOf { channel = \"nightly\"; };\n      beta    = rustChannelOf { channel = \"beta\"; };\n      stable  = rustChannelOf { channel = \"stable\"; };\n    };\n  };\n\n  # Helper builder\n  rustChannelOfTargets = channel: date: targets:\n    (rustChannelOf { inherit channel date; })\n      .rust.override { inherit targets; };\n\n  # For backward compatibility\n  rustChannels = latest.rustChannels;\n\n  # For each channel:\n  #   latest.rustChannels.nightly.cargo\n  #   latest.rustChannels.nightly.rust   # Aggregate all others. (recommended)\n  #   latest.rustChannels.nightly.rustc\n  #   latest.rustChannels.nightly.rust-analysis\n  #   latest.rustChannels.nightly.rust-docs\n  #   latest.rustChannels.nightly.rust-src\n  #   latest.rustChannels.nightly.rust-std\n\n  # For a specific date:\n  #   (rustChannelOf { date = \"2017-06-06\"; channel = \"beta\"; }).rust\n}\n"
  },
  {
    "path": "rust-src-overlay.nix",
    "content": "# Overlay that builds on top of rust-overlay.nix.\n# Adds rust-src component to all channels which is helpful for racer, intellij, ...\n\nself: super:\n\nlet mapAttrs = super.lib.mapAttrs;\n    flip = super.lib.flip;\nin {\n  # install stable rust with rust-src:\n  # \"nix-env -i -A nixos.latest.rustChannels.stable.rust\"\n\n  latest.rustChannels =\n    flip mapAttrs super.latest.rustChannels (name: value: value // {\n      rust = value.rust.override {\n        extensions = [\"rust-src\"];\n      };\n    });\n}\n"
  },
  {
    "path": "update.nix",
    "content": "let\n  _pkgs = import <nixpkgs> {};\n  _nixpkgs = _pkgs.fetchFromGitHub (_pkgs.lib.importJSON ./pkgs/nixpkgs.json);\nin\n\n{ pkgs ? import _nixpkgs {}\n, package ? null\n, maintainer ? null\n, dont_prompt ? false\n}:\n\n# TODO: add assert statements\n\nlet\n\n  pkgs-mozilla = import ./default.nix { inherit pkgs; };\n\n  dont_prompt_str = if dont_prompt then \"yes\" else \"no\";\n\n  packagesWith = cond: return: set:\n    pkgs.lib.flatten\n      (pkgs.lib.mapAttrsToList\n        (name: pkg:\n          let\n            result = builtins.tryEval (\n              if pkgs.lib.isDerivation pkg && cond name pkg\n                then [(return name pkg)]\n              else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false\n                then packagesWith cond return pkg\n              else []\n            );\n          in\n            if result.success then result.value\n            else []\n        )\n        set\n      );\n\n  packagesWithUpdateScriptAndMaintainer = maintainer':\n    let\n      maintainer =\n        if ! 
builtins.hasAttr maintainer' pkgs.lib.maintainers then\n          builtins.throw \"Maintainer with name `${maintainer'}` does not exist in `lib/maintainers.nix`.\"\n        else\n          builtins.getAttr maintainer' pkgs.lib.maintainers;\n    in\n      packagesWith (name: pkg: builtins.hasAttr \"updateScript\" pkg &&\n                                 (if builtins.hasAttr \"maintainers\" pkg.meta\n                                   then (if builtins.isList pkg.meta.maintainers\n                                           then builtins.elem maintainer pkg.meta.maintainers\n                                           else maintainer == pkg.meta.maintainers\n                                        )\n                                   else false\n                                 )\n                   )\n                   (name: pkg: pkg)\n                   pkgs-mozilla;\n\n  packageByName = name:\n    let\n        package = pkgs.lib.attrByPath (pkgs.lib.splitString \".\" name) null pkgs-mozilla;\n    in\n      if package == null then\n        builtins.throw \"Package with an attribute name `${name}` does not exist.\"\n      else if ! 
builtins.hasAttr \"updateScript\" package then\n        builtins.throw \"Package with an attribute name `${name}` does not have a `passthru.updateScript` defined.\"\n      else\n        package;\n\n  packages =\n    if package != null then\n      [ (packageByName package) ]\n    else if maintainer != null then\n      packagesWithUpdateScriptAndMaintainer maintainer\n    else\n      builtins.throw \"No arguments provided.\\n\\n${helpText}\";\n\n  helpText = ''\n    Please run:\n\n        % nix-shell update.nix --argstr maintainer garbas\n\n    to run all update scripts for all packages that list \\`garbas\\` as a maintainer\n    and have \\`updateScript\\` defined, or:\n\n        % nix-shell update.nix --argstr package <package-name>\n\n    to run the update script for a specific package.\n  '';\n\n  runUpdateScript = package: ''\n    echo -ne \" - ${package.name}: UPDATING ...\"\\\\r\n    ${package.updateScript} &> ${(builtins.parseDrvName package.name).name}.log\n    CODE=$?\n    if [ \"$CODE\" != \"0\" ]; then\n      echo \" - ${package.name}: ERROR       \"\n      echo \"\"\n      echo \"--- SHOWING ERROR LOG FOR ${package.name} ----------------------\"\n      echo \"\"\n      cat ${(builtins.parseDrvName package.name).name}.log\n      echo \"\"\n      echo \"--- SHOWING ERROR LOG FOR ${package.name} ----------------------\"\n      exit $CODE\n    else\n      rm ${(builtins.parseDrvName package.name).name}.log\n    fi\n    echo \" - ${package.name}: DONE.       
\"\n  '';\n\nin pkgs.stdenv.mkDerivation {\n  name = \"nixpkgs-mozilla-update-script\";\n  buildCommand = ''\n    echo \"\"\n    echo \"----------------------------------------------------------------\"\n    echo \"\"\n    echo \"Not possible to update packages using \\`nix-build\\`\"\n    echo \"\"\n    echo \"${helpText}\"\n    echo \"----------------------------------------------------------------\"\n    exit 1\n  '';\n  shellHook = ''\n    echo \"\"\n    echo \"Going to be running update for following packages:\"\n    echo \"${builtins.concatStringsSep \"\\n\" (map (x: \" - ${x.name}\") packages)}\"\n    echo \"\"\n    if [ \"${dont_prompt_str}\" = \"no\" ]; then\n      read -n1 -r -p \"Press space to continue...\" confirm\n    else\n      confirm=\"\"\n    fi\n    if [ \"$confirm\" = \"\" ]; then\n      echo \"\"\n      echo \"Running update for:\"\n      ${builtins.concatStringsSep \"\\n\" (map runUpdateScript packages)}\n      echo \"\"\n      echo \"Packages updated!\"\n      exit 0\n    else\n      echo \"Aborting!\"\n      exit 1\n    fi\n  '';\n}\n"
  }
]