[
  {
    "path": ".dccache",
    "content": "[{\"/Users/dhinak/Documents/GitHub/build-repo/.pylintrc\":\"1\",\"/Users/dhinak/Documents/GitHub/build-repo/add.py\":\"2\",\"/Users/dhinak/Documents/GitHub/build-repo/builder.py\":\"3\",\"/Users/dhinak/Documents/GitHub/build-repo/check_ratelimit.py\":\"4\",\"/Users/dhinak/Documents/GitHub/build-repo/downloader.py\":\"5\",\"/Users/dhinak/Documents/GitHub/build-repo/import_old.py\":\"6\",\"/Users/dhinak/Documents/GitHub/build-repo/parallel_check.py\":\"7\",\"/Users/dhinak/Documents/GitHub/build-repo/sort_plugins.py\":\"8\",\"/Users/dhinak/Documents/GitHub/build-repo/test_release.py\":\"9\",\"/Users/dhinak/Documents/GitHub/build-repo/update_config.py\":\"10\",\"/Users/dhinak/Documents/GitHub/build-repo/updater.py\":\"11\"},[359,1609043345880.1628,\"12\"],[6382,1609043323409.1133,\"13\"],[12077,1609043323410.3127,\"14\"],[217,1609043323410.6653,\"15\"],[2751,1609043323411.064,\"16\"],[6461,1609043323411.679,\"17\"],[1378,1609043323412.0698,\"18\"],[213,1609043323412.326,\"19\"],[1799,1609043323412.6558,\"20\"],[5805,1609043323413.2163,\"21\"],[6626,1609043323413.8252,\"22\"],\"3317598182cbaacada866694f5ad412226670324676a10d4bdba37ba91d7ab1b\",\"2b6989899fa7539a43eeaf2785f6fe515ae8cc6ad8c372c5a047c49349d8d272\",\"4057a856d5f40a267ef5323de9ceb7027e2f6ca3dacfb7bbe6926885d2044061\",\"edd765034a7f1491508d2062beb6246d73f34f5e51fa5bfa2c9797406f119bfb\",\"505a3205046744882bf07457d8f7aded0d6b2920c5dff7257a5ba9267b89ba18\",\"8d8980ea29ddae82216bafcc0f6a219211a34fd44b3e1a5031809f9c8668b99e\",\"a204565775c3bb211df6792461cef60ac7156cdb368069477efca57696f49346\",\"8ce81cbb63bf426bd2898a084f470805a2ed8e7f6441242900e3432f44bac717\",\"88d36be415255600d3441e6296e11d0c8a2f654fd67aa05196a4b7b6b1b07e62\",\"8c2148e850e5134996a936325d1420540983d24f3cef72d726485d4c07f7647c\",\"e66576fc1a1ff4297d34c401c17e51c4dca9caca3192af4ff0baf001be7801f4\"]"
  },
  {
    "path": ".flake8",
    "content": "[flake8]\nextend-ignore = E501, E203"
  },
  {
    "path": ".github/workflows/workflow.yaml",
    "content": "name: Build\non:\n  push:\n  schedule:\n    - cron: '*/5 * * * *'\n  workflow_dispatch:\nenv:\n  FORCE_INSTALL: 1\n  HAS_OPENSSL_BUILD: 1\n  HAS_OPENSSL_W32BUILD: 0\n  ACID32: 1\n  HOMEBREW_NO_INSTALL_CLEANUP: 1\n  HOMEBREW_NO_AUTO_UPDATE: 1\n  PROD: ${{ github.ref == 'refs/heads/github-actions' }}\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.ref }}\njobs:\n  build:\n    runs-on: m1_monterey\n    steps:\n      - name: Checkout Repository\n        uses: actions/checkout@v4\n        # with:\n        #   ref: github-actions\n      - name: Set up Python 3\n        run: brew install python3 python-tk\n#       - uses: actions/setup-python@v4\n#         with:\n#           python-version: '3.10'\n#           cache: pip\n      - name: Install Python Dependencies\n        run: |\n          python3 -m pip install -U pip wheel\n          python3 -m pip install hammock python-dateutil datetime termcolor2 purl python-magic humanize gitpython cryptography macholib\n          echo \"OVERRIDE_PYTHON3=$(which python3)\" >> \"$GITHUB_ENV\"\n      # - name: Check Parallel\n      #   run: python3 -u parallel_check.py ${{ secrets.GITHUB_TOKEN }}\n      - name: Install Build Dependencies\n        run: | # Needing for VoodooI2C to build without actually having cldoc & cpplint\n          brew tap FiloSottile/homebrew-musl-cross\n          brew install libmagic mingw-w64 openssl musl-cross\n          mkdir wrappers\n          printf \"#!/bin/bash\\nexit 0\" > wrappers/cldoc\n          printf \"#!/bin/bash\\nexit 0\" > wrappers/cpplint\n          chmod +x wrappers/cldoc wrappers/cpplint\n          echo \"$(readlink -f wrappers)\" >> \"$GITHUB_PATH\"\n      - uses: fregante/setup-git-user@2e28d51939d2a84005a917d2f844090637f435f8\n      - name: Set Up Working Tree\n        uses: actions/checkout@v4\n        with:\n          ref: builds\n          path: Config\n      - name: Check Ratelimit\n        run: python3 -u check_ratelimit.py ${{ secrets.GITHUB_TOKEN }}\n    
  - name: Run Builder\n        run: python3 -u updater.py ${{ secrets.GITHUB_TOKEN }} ${{ secrets.WEBHOOK_URL }} ${{ secrets.PAYLOAD_KEY }}\n        env:\n          JOB_NAME: ${{ github.job }}\n      - name: Check Ratelimit\n        run: python3 -u check_ratelimit.py ${{ secrets.GITHUB_TOKEN }}\n      - name: Upload Artifact\n        uses: actions/upload-artifact@v4\n        if: ${{ env.PROD == 'false' }}\n        with:\n          name: Build\n          path: Config\n"
  },
  {
    "path": ".gitignore",
    "content": "gh token.txt\nLilu-and-Friends/\n__pycache__/\nBuilds/\nConfig/\nTemp/\n.vscode/\n.DS_Store"
  },
  {
    "path": ".pylintrc",
    "content": "[MASTER]\n\ninit-hook=\"from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))\"\n\n[MESSAGES CONTROL]\n\ndisable=unused-import,\n        subprocess-run-check,\n        line-too-long,\n        too-few-public-methods,\n        missing-module-docstring,\n        missing-class-docstring,\n        missing-function-docstring"
  },
  {
    "path": "README.md",
    "content": "# build-repo\n\n![Build](https://github.com/dortania/build-repo/workflows/Build/badge.svg)\n\nCredit CorpNewt for Lilu & Friends, where some functions originate from and the inspiration for this project."
  },
  {
    "path": "add.py",
    "content": "import datetime\nimport hashlib\nimport json\nimport os\nimport time\nfrom pathlib import Path\n\nimport dateutil.parser\nimport git\nimport magic\nimport purl\nfrom hammock import Hammock as hammock\n\nfrom config_mgmt import save_config\n\nmime = magic.Magic(mime=True)\n\n\ndef hash_file(file_path: Path):\n    return hashlib.sha256(file_path.read_bytes()).hexdigest()\n\n\ndef expand_globs(str_path: str):\n    path = Path(str_path)\n    parts = path.parts[1:] if path.is_absolute() else path.parts\n    return list(Path(path.root).glob(str(Path(\"\").joinpath(*parts))))\n\n\ndef upload_release_asset(release_id, token, file_path: Path):\n    upload_url = hammock(\"https://api.github.com/repos/dortania/build-repo/releases/\" + str(release_id), auth=(\"github-actions\", token)).GET().json()\n    try:\n        upload_url = upload_url[\"upload_url\"]\n    except Exception:\n        print(upload_url)\n        raise\n    mime_type = mime.from_file(str(file_path.resolve()))\n    if not mime_type[0]:\n        print(\"Failed to guess mime type!\")\n        raise RuntimeError\n\n    asset_upload = hammock(str(purl.Template(upload_url).expand({\"name\": file_path.name, \"label\": file_path.name})), auth=(\"github-actions\", token)).POST(\n        data=file_path.read_bytes(),\n        headers={\"content-type\": mime_type}\n    )\n    return asset_upload.json()[\"browser_download_url\"]\n\n\ndef paginate(url, token):\n    url = hammock(url, auth=(\"github-actions\", token)).GET()\n    if url.links == {}:\n        return url.json()\n    else:\n        container = url.json()\n        while url.links.get(\"next\"):\n            url = hammock(url.links[\"next\"][\"url\"], auth=(\"github-actions\", token)).GET()\n            container += url.json()\n        return container\n\n\ndef add_built(plugin, token):\n    plugin_info = plugin[\"plugin\"]\n    commit_info = plugin[\"commit\"]\n    files = plugin[\"files\"]\n\n    script_dir = Path(__file__).parent.absolute()\n   
 config_path = script_dir / Path(\"Config/config.json\")\n    config_path.touch()\n    config = json.load(config_path.open())\n\n    name = plugin_info[\"Name\"]\n    plugin_type = plugin_info.get(\"Type\", \"Kext\")\n\n    ind = None\n\n    if not config.get(name, None):\n        config[name] = {}\n    if not config[name].get(\"type\", None):\n        config[name][\"type\"] = plugin_type\n    if not config[name].get(\"versions\", None):\n        config[name][\"versions\"] = []\n\n    release = {}\n    if config[name][\"versions\"]:\n        config[name][\"versions\"] = [i for i in config[name][\"versions\"] if not (i.get(\"commit\", {}).get(\"sha\", None) == commit_info[\"sha\"])]\n\n    release[\"commit\"] = {\"sha\": commit_info[\"sha\"], \"message\": commit_info[\"commit\"][\"message\"], \"url\": commit_info[\"html_url\"], \"tree_url\": commit_info[\"html_url\"].replace(\"/commit/\", \"/tree/\")}\n    release[\"version\"] = files[\"version\"]\n    release[\"date_built\"] = datetime.datetime.now(tz=datetime.timezone.utc).isoformat()\n    release[\"date_committed\"] = dateutil.parser.parse(commit_info[\"commit\"][\"committer\"][\"date\"]).isoformat()\n    release[\"date_authored\"] = dateutil.parser.parse(commit_info[\"commit\"][\"author\"][\"date\"]).isoformat()\n    release[\"source\"] = \"built\"\n\n    if os.environ.get(\"PROD\", \"false\") == \"true\":\n        releases_url = hammock(\"https://api.github.com/repos/dortania/build-repo/releases\", auth=(\"github-actions\", token))\n\n        # Delete previous releases\n        for i in paginate(\"https://api.github.com/repos/dortania/build-repo/releases\", token):\n            if i[\"name\"] == (name + \" \" + release[\"commit\"][\"sha\"][:7]):\n                print(\"\\tDeleting previous release...\")\n                releases_url(i[\"id\"]).DELETE()\n                time.sleep(3)  # Prevent race conditions\n\n        # Delete tags\n        check_tag = 
hammock(\"https://api.github.com/repos/dortania/build-repo/git/refs/tags/\" + name + \"-\" + release[\"commit\"][\"sha\"][:7], auth=(\"github-actions\", token))\n        if check_tag.GET().status_code != 404:\n            print(\"\\tDeleting previous tag...\")\n            check_tag.DELETE()\n            time.sleep(3)  # Prevent race conditions\n\n        # Create release\n        create_release = releases_url.POST(json={\n            \"tag_name\": name + \"-\" + release[\"commit\"][\"sha\"][:7],\n            \"target_commitish\": \"builds\",\n            \"name\": name + \" \" + release[\"commit\"][\"sha\"][:7]\n        })\n        # print(create_release.json()[\"id\"])\n        release[\"release\"] = {\"id\": create_release.json()[\"id\"], \"url\": create_release.json()[\"html_url\"]}\n\n    if not release.get(\"hashes\", None):\n        release[\"hashes\"] = {\"debug\": {\"sha256\": \"\"}, \"release\": {\"sha256\": \"\"}}\n\n    release[\"hashes\"][\"debug\"] = {\"sha256\": hash_file(files[\"debug\"])}\n    release[\"hashes\"][\"release\"] = {\"sha256\": hash_file(files[\"release\"])}\n\n    if files[\"extras\"]:\n        for file in files[\"extras\"]:\n            release[\"hashes\"][file.name] = {\"sha256\": hash_file(file)}\n\n    if os.environ.get(\"PROD\", \"false\") == \"true\":\n        if not release.get(\"links\", None):\n            release[\"links\"] = {}\n\n        for i in [\"debug\", \"release\"]:\n            release[\"links\"][i] = upload_release_asset(release[\"release\"][\"id\"], token, files[i])\n\n        if files[\"extras\"]:\n            if not release.get(\"extras\", None):\n                release[\"extras\"] = {}\n            for file in files[\"extras\"]:\n                release[\"extras\"][file.name] = upload_release_asset(release[\"release\"][\"id\"], token, file)\n        new_line = \"\\n\"  # No escapes in f-strings\n\n        release[\"release\"][\"description\"] = f\"\"\"**Changes:**\n{release['commit']['message'].strip()}\n[View 
on GitHub]({release['commit']['url']}) ([browse tree]({release['commit']['tree_url']}))\n\n**Hashes**:\n**Debug:**\n{files[\"debug\"].name + ': ' + release['hashes']['debug'][\"sha256\"]}\n**Release:**\n{files[\"release\"].name + ': ' + release['hashes']['release'][\"sha256\"]}\n{'**Extras:**' if files[\"extras\"] else ''}\n{new_line.join([(file.name + ': ' + release['hashes'][file.name]['sha256']) for file in files[\"extras\"]]) if files[\"extras\"] else ''}\n\"\"\".strip()\n\n        hammock(\"https://api.github.com/repos/dortania/build-repo/releases/\" + str(release[\"release\"][\"id\"]), auth=(\"github-actions\", token)).POST(json={\n            \"body\": release[\"release\"][\"description\"]\n        })\n\n    config[name][\"versions\"].insert(0, release)\n    config[name][\"versions\"].sort(key=lambda x: (x[\"date_committed\"], x[\"date_authored\"]), reverse=True)\n    save_config(config)\n\n    if os.environ.get(\"PROD\", \"false\") == \"true\":\n        repo = git.Repo(script_dir / Path(\"Config\"))\n        repo.git.add(all=True)\n        repo.git.commit(message=\"Deploying to builds\")\n        repo.git.push()\n\n    return release\n"
  },
  {
    "path": "builder.py",
    "content": "import io\nimport plistlib\nimport shutil\nimport stat\nimport subprocess\nimport zipfile\nfrom os import chdir\nfrom pathlib import Path\n\nfrom hammock import Hammock as hammock\n\n\nclass Builder:\n    def __init__(self):\n        self.lilu = {}\n        self.clang32 = None\n        self.edk2 = None\n        self.script_dir = Path(__file__).parent.absolute()\n\n        self.working_dir = self.script_dir / Path(\"Temp\")\n        if self.working_dir.exists():\n            shutil.rmtree(self.working_dir)\n        self.working_dir.mkdir()\n\n        self.build_dir = self.script_dir / Path(\"Builds\")\n        if self.build_dir.exists():\n            shutil.rmtree(self.build_dir)\n        self.build_dir.mkdir()\n\n    @staticmethod\n    def _expand_globs(p: str):\n        if \"*\" in p:\n            path = Path(p)\n            parts = path.parts[1:] if path.is_absolute() else path.parts\n            return list(Path(path.root).glob(str(Path(\"\").joinpath(*parts))))\n        else:\n            return [Path(p)]\n\n    def _bootstrap_clang32(self, target_dir: Path):\n        chdir(self.working_dir)\n        clang_dir = self.working_dir / Path(\"clang32\")\n\n        if not self.clang32:\n            print(\"Bootstrapping prerequisite: clang32...\")\n            if clang_dir.exists():\n                shutil.rmtree(clang_dir)\n            clang_dir.mkdir()\n            chdir(clang_dir)\n            print(\"\\tDownloading clang32 binary...\")\n            zipfile.ZipFile(io.BytesIO(hammock(\"https://github.com/acidanthera/ocbuild/releases/download/llvm-kext32-latest/clang-12.zip\").GET().content)).extractall()\n            (clang_dir / Path(\"clang-12\")).chmod((clang_dir / Path(\"clang-12\")).stat().st_mode | stat.S_IEXEC)\n\n            print(\"\\tDownloading clang32 scripts...\")\n            for tool in [\"fix-macho32\", \"libtool32\"]:\n                tool_path = Path(tool)\n                
tool_path.write_bytes(hammock(f\"https://raw.githubusercontent.com/acidanthera/ocbuild/master/scripts/{tool}\").GET().content)\n                tool_path.chmod(tool_path.stat().st_mode | stat.S_IEXEC)\n            self.clang32 = clang_dir.resolve()\n        (target_dir / Path(\"clang32\")).symlink_to(self.clang32)\n\n    def _bootstrap_edk2(self):\n        chdir(self.working_dir)\n        if not self.edk2:\n            print(\"Bootstrapping prerequisite: EDK II...\")\n            if Path(\"edk2\").exists():\n                shutil.rmtree(Path(\"edk2\"))\n            print(\"\\tCloning the repo...\")\n            result = subprocess.run(\"git clone https://github.com/acidanthera/audk edk2 --branch master --depth 1\".split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tClone failed!\")\n                print(result.stdout.decode())\n                return False\n            self.edk2 = True\n\n    def _build_lilu(self):\n        chdir(self.working_dir)\n        if not self.lilu:\n            print(\"Building prerequiste: Lilu...\")\n            if Path(\"Lilu\").exists():\n                shutil.rmtree(Path(\"Lilu\"))\n            print(\"\\tCloning the repo...\")\n            result = subprocess.run(\"git clone https://github.com/acidanthera/Lilu.git\".split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tClone failed!\")\n                print(result.stdout.decode())\n                return False\n            chdir(self.working_dir / Path(\"Lilu\"))\n            print(\"\\tCloning MacKernelSDK...\")\n            result = subprocess.run(\"git clone https://github.com/acidanthera/MacKernelSDK.git\".split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tClone of MacKernelSDK failed!\")\n                print(result.stdout.decode())\n                
return False\n            self._bootstrap_clang32(self.working_dir / Path(\"Lilu\"))\n            chdir(self.working_dir / Path(\"Lilu\"))\n            print(\"\\tBuilding debug version...\")\n            result = subprocess.run(\"xcodebuild -quiet -configuration Debug\".split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tBuild failed!\")\n                print(result.stdout.decode())\n                return False\n            result = subprocess.run(\"git rev-parse HEAD\".split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tObtaining commit hash failed!\")\n                print(result.stdout.decode())\n                return False\n            else:\n                commithash = result.stdout.decode().strip()\n            shutil.copytree(Path(\"build/Debug/Lilu.kext\"), self.working_dir / Path(\"Lilu.kext\"))\n            self.lilu = [commithash, self.working_dir / Path(\"Lilu.kext\")]\n        return self.lilu[1]\n\n    def build(self, plugin, commithash=None):\n        name = plugin[\"Name\"]\n        url = plugin[\"URL\"]\n        needs_lilu = plugin.get(\"Lilu\", False)\n        needs_mackernelsdk = plugin.get(\"MacKernelSDK\", False)\n        fat = plugin.get(\"32-bit\", False)\n        edk2 = plugin.get(\"EDK II\", False)\n        command = plugin.get(\"Command\")\n        prebuild = plugin.get(\"Pre-Build\", [])\n        postbuild = plugin.get(\"Post-Build\", [])\n        build_opts = plugin.get(\"Build Opts\", [])\n        build_dir = plugin.get(\"Build Dir\", \"build/\")\n        p_info = plugin.get(\"Info\", f\"{build_dir}Release/{name}.kext/Contents/Info.plist\")\n        b_type = plugin.get(\"Type\", \"Kext\")\n        d_file = plugin.get(\"Debug File\", f\"{build_dir}Debug/*.kext\")\n        r_file = plugin.get(\"Release File\", f\"{build_dir}Release/*.kext\")\n        extra_files = 
plugin.get(\"Extras\", None)\n        v_cmd = plugin.get(\"Version\", None)\n\n        chdir(self.working_dir)\n\n        if needs_lilu:\n            if not self._build_lilu():\n                print(\"Building of prerequiste: Lilu failed!\")\n                return False\n\n        chdir(self.working_dir)\n        print(\"Building \" + name + \"...\")\n        if Path(name).exists():\n            shutil.rmtree(Path(name))\n        print(\"\\tCloning the repo...\")\n        result = subprocess.run([\"git\", \"clone\", \"--recurse-submodules\", url + \".git\", name], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n        if result.returncode != 0:\n            print(\"\\tClone failed!\")\n            print(result.stdout.decode())\n            return False\n        chdir(self.working_dir / Path(name))\n\n        if commithash:\n            print(\"\\tChecking out to \" + commithash + \"...\")\n            result = subprocess.run([\"git\", \"checkout\", commithash], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tCheckout failed!\")\n                print(result.stdout.decode())\n                return False\n        else:\n            result = subprocess.run(\"git rev-parse HEAD\".split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tObtaining commit hash failed!\")\n                print(result.stdout.decode())\n                return False\n            else:\n                commithash = result.stdout.decode().strip()\n        chdir(self.working_dir / Path(name))\n\n        if needs_lilu:\n            lilu_path = self._build_lilu()\n            if not lilu_path:\n                print(\"Building of prerequiste: Lilu failed!\")\n                return False\n            shutil.copytree(lilu_path, self.working_dir / Path(name) / Path(\"Lilu.kext\"))\n\n        chdir(self.working_dir / Path(name))\n        if 
needs_mackernelsdk:\n            print(\"\\tCloning MacKernelSDK...\")\n            result = subprocess.run(\"git clone https://github.com/acidanthera/MacKernelSDK.git\".split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tClone of MacKernelSDK failed!\")\n                print(result.stdout.decode())\n                return False\n\n        chdir(self.working_dir / Path(name))\n        if fat:\n            self._bootstrap_clang32(self.working_dir / Path(name))\n            build_opts += [\"-arch\", \"x86_64\", \"-arch\", \"ACID32\"]\n\n        chdir(self.working_dir / Path(name))\n        if edk2:\n            self._bootstrap_edk2()\n\n        chdir(self.working_dir / Path(name))\n        if prebuild:\n            print(\"\\tRunning prebuild tasks...\")\n            for task in prebuild:\n                print(\"\\t\\tRunning task '\" + task[\"name\"] + \"'\")\n                args = [task[\"path\"]]\n                args.extend(task[\"args\"])\n                result = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n                if result.returncode != 0:\n                    print(\"\\t\\tTask failed!\")\n                    print(result.stdout.decode())\n                    return False\n                else:\n                    print(\"\\t\\tTask completed.\")\n        chdir(self.working_dir / Path(name))\n        if isinstance(command, str) or (isinstance(command, list) and all(isinstance(n, str) for n in command)):\n            print(\"\\tBuilding...\")\n            if isinstance(command, str):\n                command = command.split()\n            result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tBuild failed!\")\n                print(result.stdout.decode())\n                print(\"\\tReturn code: \" + str(result.returncode))\n                return 
False\n        elif isinstance(command, list) and all(isinstance(n, dict) for n in command):\n            # Multiple commands\n            for i in command:\n                print(\"\\t\" + i[\"name\"] + \"...\")\n                result = subprocess.run([i[\"path\"]] + i[\"args\"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n                if result.returncode != 0:\n                    print(\"\\tCommand failed!\")\n                    print(result.stdout.decode())\n                    print(\"\\tReturn code: \" + str(result.returncode))\n                    return False\n        else:\n            print(\"\\tBuilding release version...\")\n            args = \"xcodebuild -quiet -configuration Release\".split()\n            args += build_opts\n            args += [\"-jobs\", \"1\"]\n            # BUILD_DIR should only be added if we don't have scheme. Otherwise, use -derivedDataPath\n            args += [\"-derivedDataPath\", \"build\"] if \"-scheme\" in build_opts else [\"BUILD_DIR=build/\"]\n\n            result = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tBuild failed!\")\n                print(result.stdout.decode())\n                print(\"\\tReturn code: \" + str(result.returncode))\n                return False\n\n            print(\"\\tBuilding debug version...\")\n            args = \"xcodebuild -quiet -configuration Debug\".split()\n            args += build_opts\n            args += [\"-jobs\", \"1\"]\n            # BUILD_DIR should only be added if we don't have scheme. 
Otherwise, use -derivedDataPath\n            args += [\"-derivedDataPath\", \"build\"] if \"-scheme\" in build_opts else [\"BUILD_DIR=build/\"]\n\n            result = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tBuild failed!\")\n                print(result.stdout.decode())\n                print(\"\\tReturn code: \" + str(result.returncode))\n                return False\n        chdir(self.working_dir / Path(name))\n        if postbuild:\n            print(\"\\tRunning postbuild tasks...\")\n            for task in postbuild:\n                print(\"\\t\\tRunning task '\" + task[\"name\"] + \"'\")\n                args = [task[\"path\"]]\n                args.extend(task[\"args\"])\n                result = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=task.get(\"cwd\", None))\n                if result.returncode != 0:\n                    print(\"\\t\\tTask failed!\")\n                    print(result.stdout.decode())\n                    return False\n                else:\n                    print(\"\\t\\tTask completed.\")\n        chdir(self.working_dir / Path(name))\n        if v_cmd:\n            if isinstance(v_cmd, str):\n                v_cmd = v_cmd.split()\n            result = subprocess.run(v_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n            if result.returncode != 0:\n                print(\"\\tRunning version command failed!\")\n                print(result.stdout.decode())\n                return False\n            else:\n                version = result.stdout.decode().strip()\n        elif b_type == \"Kext\":\n            plistpath = Path(p_info)\n            version = plistlib.load(plistpath.open(mode=\"rb\"))[\"CFBundleVersion\"]\n        else:\n            print(\"\\tNo version command!\")\n            return False\n        print(\"\\tVersion: \" + version)\n        category_type = {\"Kext\": 
\"Kexts\", \"Bootloader\": \"Bootloaders\", \"Utility\": \"Utilities\", \"Other\": \"Others\"}[b_type]\n        print(\"\\tCopying to build directory...\")\n        extras = []\n        # (extras.extend(self._expand_globs(i)) for i in extra_files) if extra_files is not None else None  # pylint: disable=expression-not-assigned\n        if extra_files is not None:\n            for i in extra_files:\n                extras.extend(self._expand_globs(i))\n        debug_file = self._expand_globs(d_file)[0]\n        release_file = self._expand_globs(r_file)[0]\n        debug_dir = self.build_dir / Path(category_type) / Path(name) / Path(commithash) / Path(\"Debug\")\n        release_dir = self.build_dir / Path(category_type) / Path(name) / Path(commithash) / Path(\"Release\")\n        for directory in [debug_dir, release_dir]:\n            if directory.exists():\n                shutil.rmtree(directory)\n            directory.mkdir(parents=True)\n        if extras:\n            for i in extras:\n                if i.is_dir():\n                    print(f\"\\t{i} is a dir; please fix!\")\n                    shutil.copytree(i, debug_dir / i.name)\n                    shutil.copytree(i, release_dir / i.name)\n                elif i.is_file():\n                    shutil.copy(i, debug_dir)\n                    shutil.copy(i, release_dir)\n                elif not i.exists():\n                    print(f\"\\t{i} does not exist!\")\n                    return False\n                else:\n                    print(f\"\\t{i} is not a dir or a file!\")\n                    continue\n\n        if debug_file.is_dir():\n            print(f\"{debug_file} is a dir; please fix!\")\n            shutil.copytree(debug_file, debug_dir / debug_file.name)\n        elif debug_file.is_file():\n            shutil.copy(debug_file, debug_dir)\n\n        if release_file.is_dir():\n            print(f\"{release_file} is a dir; please fix!\")\n            shutil.copytree(release_file, release_dir / 
release_file.name)\n        elif release_file.is_file():\n            shutil.copy(release_file, release_dir)\n\n        return {\"debug\": debug_dir / Path(debug_file.name), \"release\": release_dir / Path(release_file.name), \"extras\": [debug_dir / Path(i.name) for i in extras], \"version\": version}\n"
  },
  {
    "path": "check_ratelimit.py",
    "content": "import sys\nfrom hammock import Hammock as hammock\n\ntoken = sys.argv[1].strip()\neee = hammock(\"https://api.github.com/rate_limit\").GET(auth=(\"github-actions\", token))\nprint(eee.text or eee.content)\n"
  },
  {
    "path": "config_mgmt.py",
    "content": "import copy\r\nimport json\r\nfrom pathlib import Path\r\n\r\n\r\ndef save_config(data: dict):\r\n    config_dir = Path(__file__).parent.absolute() / Path(\"Config\")\r\n    plugin_dir = config_dir / Path(\"plugins\")\r\n    plugin_dir.mkdir(exist_ok=True)\r\n\r\n    version = data[\"_version\"]\r\n\r\n    for plugin in data:\r\n        if plugin == \"_version\":\r\n            continue\r\n        data[plugin][\"versions\"].sort(key=lambda x: (x[\"date_committed\"], x[\"date_authored\"]), reverse=True)\r\n        json.dump(data[plugin] | {\"_version\": version}, (plugin_dir / Path(f\"{plugin}.json\")).open(\"w\"), sort_keys=True)\r\n\r\n    json.dump(data, (config_dir / Path(\"config.json\")).open(\"w\"), sort_keys=True)\r\n\r\n    latest = copy.deepcopy(data)\r\n    for plugin in latest:\r\n        if plugin == \"_version\":\r\n            continue\r\n        latest[plugin][\"versions\"] = [latest[plugin][\"versions\"][0]]\r\n\r\n    json.dump(latest, (config_dir / Path(\"latest.json\")).open(\"w\"), sort_keys=True)\r\n    json.dump({\"plugins\": list(data.keys()), \"_version\": version}, (config_dir / Path(\"plugins.json\")).open(\"w\"), sort_keys=True)\r\n"
  },
  {
    "path": "downloader.py",
    "content": "import json\nimport distutils.util\nimport zipfile\nfrom pathlib import Path\nfrom hammock import Hammock as hammock\n\nplugins = hammock(\"https://raw.githubusercontent.com/dortania/build-repo/github-actions/plugins.json\").GET()\nplugins = json.loads(plugins.text)\n\nconfig = hammock(\"https://raw.githubusercontent.com/dortania/build-repo/builds/config.json\").GET()\nconfig = json.loads(config.text)\nprint(\"Global Settings: \")\nensure_latest = bool(distutils.util.strtobool(input(\"Ensure latest? (\\\"true\\\" or \\\"false\\\") \").lower()))\nunzip = bool(distutils.util.strtobool(input(\"Unzip automatically and delete zip? (\\\"true\\\" or \\\"false\\\") \").lower()))\nextract_dir = input(\"Put files in directory (leave blank for current dir): \") if unzip else None\ndbg = input(\"Debug or release? (\\\"debug\\\" or \\\"release\\\") \").lower()\nwhile True:\n    target = input(\"Enter product to download (case sensitive): \")\n    try:\n        if ensure_latest:\n            organization = repo = None\n            for plugin in plugins[\"Plugins\"]:\n                if plugin[\"Name\"] == target:\n                    organization, repo = plugin[\"URL\"].strip().replace(\"https://github.com/\", \"\").split(\"/\")\n                    break\n            if not repo:\n                print(\"Product \" + target + \" not available\\n\")\n                continue\n            commits_url = hammock(\"https://api.github.com\").repos(organization, repo).commits.GET(params={\"per_page\": 100})\n            commit_hash = json.loads(commits_url.text or commits_url.content)[0][\"sha\"]\n            to_dl = None\n            for i in config[target][\"versions\"]:\n                if i[\"commit\"][\"sha\"] == commit_hash:\n                    to_dl = i\n                    break\n            if not to_dl:\n                print(\"Latest version (\" + commit_hash + \") unavailable\\n\")\n                continue\n        else:\n            to_dl = 
config[target][\"versions\"][0]\n        dl_link = to_dl[\"links\"][dbg]\n        print(f\"Downloading {target} version {to_dl['version']} sha {to_dl['commit']['sha']} and date built {to_dl['date_built']}\")\n    except KeyError as error:\n        if error.args[0] == target:\n            print(\"Product \" + error.args[0] + \" not available\\n\")\n            continue\n        elif error.args[0] == dbg:\n            print(\"Version \" + error.args[0] + \" not available\\n\")\n            continue\n        else:\n            raise error\n    file_name = Path(dl_link).name\n    dl_url = hammock(dl_link).GET()\n    Path(file_name).write_bytes(dl_url.content or dl_url.text)\n    print(\"Finished downloading.\")\n    if unzip:\n        with zipfile.ZipFile(file_name, \"r\") as zip_ref:\n            zip_ref.extractall(extract_dir)\n        Path(file_name).unlink()\n        print(\"Finished extracting.\")\n    print(\"Done.\\n\")\n"
  },
  {
    "path": "local-test.sh",
    "content": "pip3 install hammock python-dateutil datetime termcolor2 purl python-magic humanize gitpython cryptography\nrm -Rf Config Temp Builds\ngit clone https://github.com/dortania/build-repo.git Config --depth 1 --single-branch --branch builds --sparse --filter=blob:none\npython3 -u check_ratelimit.py\npython3 -u updater.py\npython3 -u check_ratelimit.py\npython3 -u update_config.py"
  },
  {
    "path": "notify.py",
    "content": "import json\nimport os\nimport sys\n\nimport cryptography.fernet as fernet\nimport requests\nfrom hammock import Hammock as hammock\n\nJOB_LINK = None\n\nwebhook = sys.argv[2].strip()\nfern = fernet.Fernet(sys.argv[3].strip().encode())\n\n\ndef get_current_run_link(token):\n    global JOB_LINK\n    if JOB_LINK:\n        return JOB_LINK\n    this_run = hammock(f\"https://api.github.com/repos/{os.environ['GITHUB_REPOSITORY']}/actions/runs/{os.environ['GITHUB_RUN_ID']}/jobs\", auth=(\"github-actions\", token)).GET()\n    try:\n        this_run.raise_for_status()\n    except requests.HTTPError as err:\n        print(err)\n        return\n    this_job = [i for i in this_run.json()[\"jobs\"] if i[\"name\"] == os.environ['JOB_NAME']][0]\n    JOB_LINK = this_job[\"html_url\"]\n    return JOB_LINK\n\n\ndef notify(token, results, status):\n    if os.environ.get(\"PROD\", \"false\") == \"true\":\n        results = dict(results)\n        results[\"status\"] = status\n        results[\"job_url\"] = get_current_run_link(token)\n        if results.get(\"files\"):\n            results[\"files\"] = {k: str(v) for k, v in results[\"files\"].items()}\n\n        requests.post(webhook, data=fern.encrypt(json.dumps(results).encode()))\n\n\ndef notify_success(token, results):\n    notify(token, results, \"succeeded\")\n\n\ndef notify_failure(token, results):\n    notify(token, results, \"failed\")\n\n\ndef notify_error(token, results):\n    notify(token, results, \"errored\")\n"
  },
  {
    "path": "parallel_check.py",
    "content": "import os\nimport sys\nimport time\n\nimport requests\n\ntoken = sys.argv[1].strip()\n\nsession = requests.Session()\nsession.auth = (\"github-actions\", token)\n\nthis_run_url = f\"https://api.github.com/repos/{os.environ['GITHUB_REPOSITORY']}/actions/runs/{os.environ['GITHUB_RUN_ID']}\"\nworkflow_url = session.get(this_run_url).json()[\"workflow_url\"]\n\nruns = session.get(f\"{workflow_url}/runs\").json()\nrun_index = 0\n\nfor i, run in enumerate(runs[\"workflow_runs\"]):\n    if str(run[\"id\"]) == str(os.environ[\"GITHUB_RUN_ID\"]):\n        run_index = i\n        break\n\nfor i, run in enumerate(runs[\"workflow_runs\"]):\n    if i > run_index and str(run[\"id\"]) != str(os.environ[\"GITHUB_RUN_ID\"]) and run[\"status\"] != \"completed\":\n        print(f\"Another build ({run['id']} with status {run['status']}) is running, cancelling this one...\")\n        cancel_request = session.post(f\"{this_run_url}/cancel\")\n        if cancel_request.status_code != 202:\n            sys.exit(f\"Status code did not match: {cancel_request.status_code}\")\n        else:\n            print(\"Cancel request acknowledged, sleeping 10 seconds to account for delay...\")\n            time.sleep(10)\n            sys.exit(0)\n"
  },
  {
    "path": "plugins.json",
    "content": "{\n  \"Plugins\": [\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"An open source kernel extension providing a set of patches required for non-native Airport Broadcom Wi-Fi cards.\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"AirportBrcmFixup\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/AirportBrcmFixup\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"dynamic audio patching\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"AppleALC\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/AppleALC\"\n    },\n    {\n      \"Build Opts\": [\n        \"-target\",\n        \"Package\"\n      ],\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"An open source kernel extension which applies PatchRAM updates for Broadcom RAMUSB based devices\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"BrcmPatchRAM\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/BrcmPatchRAM\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Handler for brightness keys without DSDT patches\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"BrightnessKeys\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/BrightnessKeys\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Dynamic macOS CPU power management data injection\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"CPUFriend\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/CPUFriend\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Combines functionality of VoodooTSCSync and disabling 
xcpm_urgency if TSC is not in sync\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"CpuTscSync\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/CpuTscSync\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Various patches to install Rosetta cryptex\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"CryptexFixup\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/CryptexFixup\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"A Lilu plugin intended to enable debug output in the macOS kernel\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"DebugEnhancer\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/DebugEnhancer\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Allows reading Embedded Controller fields over 1 byte long\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"ECEnabler\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/1Revenger1/ECEnabler\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"SD host controller support for macOS\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"EmeraldSDHC\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/EmeraldSDHC\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Lilu Kernel extension for enabling Sidecar, NightShift, AirPlay to Mac and Universal Control support\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"FeatureUnlock\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/FeatureUnlock\"\n    },\n    {\n      \"Debug 
File\": \"build/Debug/*.zip\",\n      \"Desc\": \"A Lilu plugin intended to fix hibernation compatibility issues\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"HibernationFixup\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/HibernationFixup\"\n    },\n    {\n      \"Build Opts\": [\n        \"-alltargets\"\n      ],\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Intel Bluetooth Drivers for macOS\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"IntelBluetoothFirmware\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/OpenIntelWireless/IntelBluetoothFirmware\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Intel Ethernet LAN driver for macOS\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"IntelMausi\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/IntelMausi\"\n    },\n    {\n      \"32-bit\": true,\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"for arbitrary kext, library, and program patching\",\n      \"MacKernelSDK\": true,\n      \"Name\": \"Lilu\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/Lilu\"\n    },\n    {\n      \"Build Opts\": [\n        \"-target\",\n        \"Package\"\n      ],\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Hyper-V integration support for macOS\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"MacHyperVSupport\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/MacHyperVSupport\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"patches for the Apple NVMe storage driver, IONVMeFamily\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": 
\"NVMeFix\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/NVMeFix\"\n    },\n    {\n      \"Command\": [\n        {\n          \"args\": [],\n          \"name\": \"Building DuetPkg\",\n          \"path\": \"./build_duet.tool\"\n        },\n        {\n          \"args\": [],\n          \"name\": \"Building OpenCorePkg\",\n          \"path\": \"./build_oc.tool\"\n        }\n      ],\n      \"Debug File\": \"Binaries/*DEBUG*.zip\",\n      \"Desc\": \"OpenCore front end\",\n      \"Max Per Run\": 2,\n      \"Name\": \"OpenCorePkg\",\n      \"Release File\": \"Binaries/*RELEASE*.zip\",\n      \"Type\": \"Bootloader\",\n      \"URL\": \"https://github.com/acidanthera/OpenCorePkg\",\n      \"Version\": [\n        \"awk\",\n        \"/^#define OPEN_CORE_VERSION/ { print substr($3,2,5) }\",\n        \"Include/Acidanthera/Library/OcMainLib.h\"\n      ]\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"open source kernel extension providing a way to emulate some offsets in your CMOS (RTC) memory\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"RTCMemoryFixup\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/RTCMemoryFixup\"\n    },\n    {\n      \"Debug File\": \"debug.zip\",\n      \"Desc\": \"OS X open source driver for the Realtek RTL8111/8168 family\",\n      \"Name\": \"RealtekRTL8111\",\n      \"MacKernelSDK\": true,\n      \"Post-Build\": [\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"../../release.zip\",\n            \"RealtekRTL8111.kext\"\n          ],\n          \"cwd\": \"build/Release\",\n          \"name\": \"Zip Release Directory\",\n          \"path\": \"zip\"\n        },\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"../../debug.zip\",\n            \"RealtekRTL8111.kext\"\n          ],\n          \"cwd\": 
\"build/Debug\",\n          \"name\": \"Zip Debug Directory\",\n          \"path\": \"zip\"\n        }\n      ],\n      \"Release File\": \"release.zip\",\n      \"URL\": \"https://github.com/Mieze/RTL8111_driver_for_OS_X\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Lilu kernel extension for blocking unwanted processes and unlocking support for certain features restricted to other hardware\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"RestrictEvents\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/RestrictEvents\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Serial mouse kernel extension for macOS\",\n      \"MacKernelSDK\": true,\n      \"Name\": \"SerialMouse\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/Goldfish64/SerialMouse\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"UEFI framebuffer driver for macOS\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"UEFIGraphicsFB\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/UEFIGraphicsFB\"\n    },\n    {\n      \"32-bit\": true,\n      \"Build Opts\": [\n        \"-target\",\n        \"Package\"\n      ],\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"advanced Apple SMC emulator in the kernel\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"VirtualSMC\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/VirtualSMC\"\n    },\n    {\n      \"Build Dir\": \"build/Build/Products/\",\n      \"Build Opts\": [\n        \"-workspace\",\n        \"VoodooI2C.xcworkspace\",\n        \"-scheme\",\n        \"VoodooI2C\"\n      ],\n      \"Debug File\": \"build/Build/Products/Debug/debug.zip\",\n      \"Desc\": \"Intel I2C 
controller and slave device drivers for macOS\",\n      \"Extras\": [\n        \"build/Build/Products/Release/release-dSYM.zip\"\n      ],\n      \"MacKernelSDK\": true,\n      \"Name\": \"VoodooI2C\",\n      \"Post-Build\": [\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"release.zip\",\n            \".\",\n            \"-i\",\n            \"./*.kext/*\"\n          ],\n          \"cwd\": \"build/Build/Products/Release\",\n          \"name\": \"Zip Release Directory\",\n          \"path\": \"zip\"\n        },\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"release-dSYM.zip\",\n            \".\",\n            \"-i\",\n            \"./*.dSYM/*\"\n          ],\n          \"cwd\": \"build/Build/Products/Release\",\n          \"name\": \"Zip Release dSYM\",\n          \"path\": \"zip\"\n        },\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"debug.zip\",\n            \".\",\n            \"-i\",\n            \"./*.kext/*\"\n          ],\n          \"cwd\": \"build/Build/Products/Debug\",\n          \"name\": \"Zip Debug Directory\",\n          \"path\": \"zip\"\n        }\n      ],\n      \"Pre-Build\": [\n        {\n          \"args\": [\n            \"-LfsO\",\n            \"https://raw.githubusercontent.com/acidanthera/VoodooInput/master/VoodooInput/Scripts/bootstrap.sh\"\n          ],\n          \"name\": \"Download VoodooInput Bootstrap Script\",\n          \"path\": \"curl\"\n        },\n        {\n          \"args\": [\n            \"+x\",\n            \"bootstrap.sh\"\n          ],\n          \"name\": \"Make Bootstrap Executable\",\n          \"path\": \"chmod\"\n        },\n        {\n          \"args\": [],\n          \"name\": \"Run VoodooInput Bootstrap\",\n          \"path\": \"./bootstrap.sh\"\n        },\n        {\n          \"args\": [\n            \"VoodooInput\",\n            \"Dependencies/\"\n          ],\n          
\"name\": \"Move VoodooInput to Dependencies\",\n          \"path\": \"mv\"\n        }\n      ],\n      \"Release File\": \"build/Build/Products/Release/release.zip\",\n      \"Type\": \"Kext\",\n      \"URL\": \"https://github.com/VoodooI2C/VoodooI2C\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Generic Multitouch Handler kernel extension for macOS\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"VoodooInput\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/VoodooInput\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"PS2 controller kext\",\n      \"Info\": \"build/Release/VoodooPS2Controller.kext/Contents/Info.plist\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"VoodooPS2\",\n      \"Pre-Build\": [\n        {\n          \"args\": [\n            \"-LfsO\",\n            \"https://raw.githubusercontent.com/acidanthera/VoodooInput/master/VoodooInput/Scripts/bootstrap.sh\"\n          ],\n          \"name\": \"Download VoodooInput Bootstrap Script\",\n          \"path\": \"curl\"\n        },\n        {\n          \"args\": [\n            \"+x\",\n            \"bootstrap.sh\"\n          ],\n          \"name\": \"Make Bootstrap Executable\",\n          \"path\": \"chmod\"\n        },\n        {\n          \"args\": [],\n          \"name\": \"Run VoodooInput Bootstrap\",\n          \"path\": \"./bootstrap.sh\"\n        }\n      ],\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/VoodooPS2\"\n    },\n    {\n      \"Command\": \"make\",\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Refined macOS driver for ALPS TouchPads\",\n      \"Info\": \"VoodooPS2Controller.kext/Contents/Info.plist\",\n      \"Name\": \"VoodooPS2-Alps\",\n      \"Post-Build\": [\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n        
    \"release.zip\",\n            \".\"\n          ],\n          \"cwd\": \"build/Release\",\n          \"name\": \"Zip Release Directory\",\n          \"path\": \"zip\"\n        },\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"debug.zip\",\n            \".\"\n          ],\n          \"cwd\": \"build/Debug\",\n          \"name\": \"Zip Debug Directory\",\n          \"path\": \"zip\"\n        }\n      ],\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/1Revenger1/VoodooPS2-Alps\"\n    },\n    {\n      \"Build Dir\": \"build/Build/Products/\",\n      \"Build Opts\": [\n        \"-scheme\",\n        \"VoodooRMI\"\n      ],\n      \"Debug File\": \"build/Build/Products/Debug/*.zip\",\n      \"Desc\": \"Synaptic Trackpad driver over SMBus/I2C for macOS\",\n      \"MacKernelSDK\": true,\n      \"Name\": \"VoodooRMI\",\n      \"Release File\": \"build/Build/Products/Release/*.zip\",\n      \"Type\": \"Kext\",\n      \"URL\": \"https://github.com/VoodooSMBus/VoodooRMI\"\n    },\n    {\n      \"Build Dir\": \"build/Build/Products/\",\n      \"Build Opts\": [\n        \"-scheme\",\n        \"VoodooSMBus\"\n      ],\n      \"Debug File\": \"build/Build/Products/Debug/debug.zip\",\n      \"Desc\": \"i2c-i801 driver port for macOS X + ELAN SMBus macOS X driver for Thinkpad T480s, L380, P52\",\n      \"Name\": \"VoodooSMBus\",\n      \"Post-Build\": [\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"release.zip\",\n            \".\",\n            \"-i\",\n            \"./*.kext/*\"\n          ],\n          \"cwd\": \"build/Build/Products/Release\",\n          \"name\": \"Zip Release Directory\",\n          \"path\": \"zip\"\n        },\n        {\n          \"args\": [\n            \"-r\",\n            \"-X\",\n            \"debug.zip\",\n            \".\",\n            \"-i\",\n            \"./*.kext/*\"\n          ],\n          \"cwd\": 
\"build/Build/Products/Debug\",\n          \"name\": \"Zip Debug Directory\",\n          \"path\": \"zip\"\n        }\n      ],\n      \"Release File\": \"build/Build/Products/Release/release.zip\",\n      \"Type\": \"Kext\",\n      \"URL\": \"https://github.com/VoodooSMBus/VoodooSMBus\"\n    },\n    {\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"provides patches for AMD/Nvidia/Intel GPUs\",\n      \"Lilu\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"WhateverGreen\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"URL\": \"https://github.com/acidanthera/WhateverGreen\"\n    },\n    {\n      \"Build Opts\": [\n        \"-arch\",\n        \"x86_64\",\n        \"-project\",\n        \"gfxutil.xcodeproj\",\n        \"ONLY_ACTIVE_ARCH=NO\"\n      ],\n      \"Debug File\": \"build/Debug/*.zip\",\n      \"Desc\": \"Utility to convert device paths to/from EFI device paths\",\n      \"EDK II\": true,\n      \"MacKernelSDK\": true,\n      \"Name\": \"gfxutil\",\n      \"Release File\": \"build/Release/*.zip\",\n      \"Type\": \"Utility\",\n      \"URL\": \"https://github.com/acidanthera/gfxutil\",\n      \"Version\": [\n        \"awk\",\n        \"/^#define VERSION/ { print substr($3,2,5) }\",\n        \"main.h\"\n      ]\n    }\n  ]\n}\n"
  },
  {
    "path": "requirements.txt",
    "content": "# Builder dependencies\r\nhammock\r\npython-dateutil\r\ndatetime\r\ntermcolor2\r\npurl\r\npython-magic\r\nhumanize\r\ngitpython\r\ncryptography\r\n\r\n# For ACID32\r\nmacholib"
  },
  {
    "path": "sort_plugins.py",
    "content": "from pathlib import Path\nimport json\n\nplugins = json.load(Path(\"plugins.json\").open())\nplugins[\"Plugins\"].sort(key=lambda x: x[\"Name\"])\njson.dump(plugins, Path(\"plugins.json\").open(\"w\"), indent=2, sort_keys=True)\n"
  },
  {
    "path": "update_config.py",
    "content": "import copy\nimport json\nimport os\nimport sys\nimport urllib.parse\nfrom pathlib import Path\n\nimport dateutil.parser\nimport git\nfrom hammock import Hammock as hammock\n\nfrom config_mgmt import save_config\n\ntoken = sys.argv[1].strip()\n\n\nconfig: dict = json.load(Path(\"Config/config.json\").open())\nplugins = json.load(Path(\"plugins.json\").open())\n\n# version 2 to 3\n\nif config[\"_version\"] == 2:\n    def add_author_date(name, version):\n        if version.get(\"date_authored\", None):\n            return version\n        else:\n            organization = repo = None\n            for plugin in plugins[\"Plugins\"]:\n                if name == \"AppleSupportPkg\" or name == \"BT4LEContinuityFixup\":\n                    repo = name\n                    organization = \"acidanthera\"\n                    break\n                elif name == \"NoTouchID\":\n                    repo = name\n                    organization = \"al3xtjames\"\n                    break\n                if plugin[\"Name\"] == name:\n                    organization, repo = plugin[\"URL\"].strip().replace(\"https://github.com/\", \"\").split(\"/\")\n                    break\n            if not repo:\n                print(\"Product \" + name + \" not found\")\n                raise Exception\n            commit_date = dateutil.parser.parse(\n                json.loads(hammock(\"https://api.github.com\").repos(organization, repo).commits(version[\"commit\"][\"sha\"]).GET(auth=(\"github-actions\", token)).text)[\"commit\"][\"author\"][\"date\"]\n            )\n            version[\"date_authored\"] = commit_date.isoformat()\n            return version\n\n    config = {i: v for i, v in config.items() if not i.startswith(\"_\")}\n\n    for i in config:\n        for j, item in enumerate(config[i][\"versions\"]):\n            config[i][\"versions\"][j] = add_author_date(i, item)\n            print(f\"Added {config[i]['versions'][j]['date_authored']} for {i} 
{config[i]['versions'][j]['commit']['sha']}\")\n\n    for i in config:\n        for j, item in enumerate(config[i][\"versions\"]):\n            if not config[i][\"versions\"][j].get(\"date_committed\"):\n                config[i][\"versions\"][j][\"date_committed\"] = config[i][\"versions\"][j].pop(\"datecommitted\")\n            if not config[i][\"versions\"][j].get(\"date_built\"):\n                config[i][\"versions\"][j][\"date_built\"] = config[i][\"versions\"][j].pop(\"dateadded\")\n\n        config[i][\"versions\"].sort(key=lambda x: (x[\"date_committed\"], x[\"date_authored\"]), reverse=True)\n\n    config[\"_version\"] = 3\n\n# version 3 to 4\n# nothing changed, but the other json files were added\n\nif config[\"_version\"] == 3:\n    config[\"_version\"] = 4\n\nsave_config(config)\n\nif os.environ.get(\"PROD\", \"false\") == \"true\":\n    repo = git.Repo(\"Config\")\n    if repo.is_dirty(untracked_files=True):\n        repo.git.add(all=True)\n        repo.git.commit(message=\"Deploying to builds\")\n        repo.git.push()\n"
  },
  {
    "path": "updater.py",
    "content": "import datetime\nimport json\nimport os\nimport sys\nimport traceback\nfrom pathlib import Path\n\nimport dateutil.parser\nimport git\nimport humanize\nfrom hammock import Hammock as hammock\nfrom termcolor2 import c as color\n\nimport builder\nfrom add import add_built\nfrom notify import notify_error, notify_failure, notify_success\n\n\ndef matched_key_in_dict_array(array, key, value):\n    if not array:\n        return False\n    for dictionary in array:\n        if dictionary.get(key, None) == value:\n            return True\n    return False\n\n\nMAX_OUTSTANDING_COMMITS = 3\nDATE_DELTA = 7\nRETRIES_BEFORE_FAILURE = 2\n\ntheJSON = json.load(Path(\"plugins.json\").open())\nplugins = theJSON.get(\"Plugins\", [])\n\nconfig_dir = Path(\"Config\").resolve()\n\nconfig = json.load((config_dir / Path(\"config.json\")).open())\nfailures = json.load((config_dir / Path(\"failures.json\")).open())\n\n\ndef add_to_failures(plugin):\n    if not failures.get(plugin[\"plugin\"][\"Name\"]):\n        failures[plugin[\"plugin\"][\"Name\"]] = {plugin[\"commit\"][\"sha\"]: 1}\n    elif not failures[plugin[\"plugin\"][\"Name\"]].get(plugin[\"commit\"][\"sha\"]):\n        failures[plugin[\"plugin\"][\"Name\"]][plugin[\"commit\"][\"sha\"]] = 1\n    else:\n        failures[plugin[\"plugin\"][\"Name\"]][plugin[\"commit\"][\"sha\"]] += 1\n\n\nlast_updated_path = config_dir / Path(\"last_updated.txt\")\n\ninfo = []\nto_build = []\nto_add = []\n\nif last_updated_path.is_file() and last_updated_path.stat().st_size != 0:\n    date_to_compare = dateutil.parser.parse(last_updated_path.read_text())\n    last_updated_path.write_text(datetime.datetime.now(tz=datetime.timezone.utc).isoformat())\nelse:\n    last_updated_path.touch()\n    date_to_compare = datetime.datetime(2021, 3, 1, tzinfo=datetime.timezone.utc)\n    last_updated_path.write_text(date_to_compare.isoformat())\n\nprint(\"Last update date is \" + date_to_compare.isoformat())\n\ntoken = sys.argv[1].strip()\n\nfor 
plugin in plugins:\n    organization, repo = plugin[\"URL\"].strip().replace(\"https://github.com/\", \"\").split(\"/\")\n    base_url = hammock(\"https://api.github.com\")\n\n    releases_url = base_url.repos(organization, repo).releases.GET(auth=(\"github-actions\", token), params={\"per_page\": 100})\n    releases = json.loads(releases_url.text or releases_url.content)\n    if releases_url.headers.get(\"Link\"):\n        print(releases_url.headers[\"Link\"])\n\n    commits_url = base_url.repos(organization, repo).commits.GET(auth=(\"github-actions\", token), params={\"per_page\": 100})\n    commits = json.loads(commits_url.text or commits_url.content)\n    if commits_url.headers.get(\"Link\"):\n        print(commits_url.headers[\"Link\"])\n\n    count = 1\n\n    for commit in commits:\n        commit_date = dateutil.parser.parse(commit[\"commit\"][\"committer\"][\"date\"])\n        newer = commit_date >= date_to_compare - datetime.timedelta(days=DATE_DELTA)\n\n        if isinstance(plugin.get(\"Force\", None), str):\n            force_build = commit[\"sha\"] == plugin.get(\"Force\")\n        else:\n            force_build = plugin.get(\"Force\") and commits.index(commit) == 0\n\n        not_in_repo = True\n        for i in config.get(plugin[\"Name\"], {}).get(\"versions\", []):\n            if i[\"commit\"][\"sha\"] == commit[\"sha\"]:\n                not_in_repo = False\n\n        hit_failure_threshold = failures.get(plugin[\"Name\"], {}).get(commit[\"sha\"], 0) > RETRIES_BEFORE_FAILURE\n        within_max_outstanding = count <= plugin.get(\"Max Per Run\", MAX_OUTSTANDING_COMMITS)\n\n        # Do not build if we hit the limit for builds per run for this plugin.\n        if not within_max_outstanding:\n            continue\n\n        # Build if:\n        # Newer than last checked and not in repo, OR not in repo and latest commit\n        # AND must not have hit failure threshold (retries >= RETRIES_BEFORE_FAILURE)\n        # OR Force is set to true (ignores 
blacklist as this is manual intervention)\n\n        if (((newer and not_in_repo) or (not_in_repo and commits.index(commit) == 0)) and not hit_failure_threshold) or force_build:\n            if commits.index(commit) == 0:\n                print(plugin[\"Name\"] + \" by \" + organization + \" latest commit (\" + commit_date.isoformat() + \") not built\")\n            else:\n                print(plugin[\"Name\"] + \" by \" + organization + \" commit \" + commit[\"sha\"] + \" (\" + commit_date.isoformat() + \") not built\")\n            to_build.append({\"plugin\": plugin, \"commit\": commit})\n            count += 1\n        elif hit_failure_threshold:\n            print(plugin[\"Name\"] + \" by \" + organization + \" commit \" + commit[\"sha\"] + \" (\" + commit_date.isoformat() + \") has hit failure threshold!\")\n\n    for release in releases:\n        release_date = dateutil.parser.parse(release[\"created_at\"])\n        if release_date >= date_to_compare:\n            if releases.index(release) == 0:\n                print(plugin[\"Name\"] + \" by \" + organization + \" latest release (\" + release_date.isoformat() + \") not added\")\n            else:\n                print(plugin[\"Name\"] + \" by \" + organization + \" release \" + release[\"name\"] + \" (\" + release_date.isoformat() + \") not added\")\n            to_add.append({\"plugin\": plugin, \"release\": release})\n\n\n# for i in to_add: addRelease(i)\n\n\n# Start setting up builder here.\nbuilder = builder.Builder()\n\nfailed = []\nsucceeded = []\nerrored = []\n\nprint(color(f\"\\nBuilding {len(to_build)} things\").bold)\nfor plugin in to_build:\n    print(f\"\\nBuilding {color(plugin['plugin']['Name']).bold}\")\n    try:\n        started = datetime.datetime.now()\n        files = None\n        files = builder.build(plugin[\"plugin\"], commithash=plugin[\"commit\"][\"sha\"])\n    except Exception as error:\n        duration = datetime.datetime.now() - started\n\n        print(\"An error 
occurred!\")\n        print(error)\n        traceback.print_tb(error.__traceback__)\n        if files:\n            print(f\"Files: {files}\")\n\n        print(f\"{color('Building of').red} {color(plugin['plugin']['Name']).red.bold} {color('errored').red}\")\n        print(f\"Took {humanize.naturaldelta(duration)}\")\n        notify_error(token, plugin)\n        errored.append(plugin)\n        add_to_failures(plugin)\n        continue\n\n    duration = datetime.datetime.now() - started\n\n    if files:\n        print(f\"{color('Building of').green} {color(plugin['plugin']['Name']).green.bold} {color('succeeded').green}\")\n        print(f\"Took {humanize.naturaldelta(duration)}\")\n\n        results = plugin\n        results[\"files\"] = files\n\n        print(\"Adding to config...\")\n        results[\"config_item\"] = add_built(results, token)\n        notify_success(token, results)\n        succeeded.append(results)\n    else:\n        print(f\"{color('Building of').red} {color(plugin['plugin']['Name']).red.bold} {color('failed').red}\")\n        print(f\"Took {humanize.naturaldelta(duration)}\")\n\n        notify_failure(token, plugin)\n        failed.append(plugin)\n        add_to_failures(plugin)\n\nprint(color(f\"\\n{len(succeeded)} of {len(to_build)} built successfully\\n\").bold)\nif len(succeeded) > 0:\n    print(color(\"Succeeded:\").green)\n    for i in succeeded:\n        print(i[\"plugin\"][\"Name\"])\nif len(failed) > 0:\n    print(color(\"\\nFailed:\").red)\n    for i in failed:\n        print(i[\"plugin\"][\"Name\"])\nif len(errored) > 0:\n    print(color(\"\\nErrored:\").red)\n    for i in errored:\n        print(i[\"plugin\"][\"Name\"])\n\njson.dump(failures, (config_dir / Path(\"failures.json\")).open(\"w\"), indent=2, sort_keys=True)\n\n\nif os.environ.get(\"PROD\", \"false\") == \"true\":\n    repo = git.Repo(config_dir)\n    if repo.is_dirty(untracked_files=True):\n        repo.git.add(all=True)\n        repo.git.commit(message=\"Deploying to 
builds\")\n        repo.git.push()\n\n\nif len(failed) > 0 or len(errored) > 0:\n    sys.exit(10)\n"
  }
]