Repository: denoland/deployctl Branch: main Commit: 87e43e57b233 Files: 90 Total size: 287.5 KB Directory structure: gitextract_q15qzxid/ ├── .github/ │ └── workflows/ │ ├── ci.yml │ ├── on-release.yml │ ├── publish.yml │ └── test.yml ├── .vscode/ │ └── settings.json ├── LICENSE ├── README.md ├── action/ │ ├── README.md │ ├── deps.js │ ├── index.js │ ├── package.json │ ├── shim.js │ └── tests/ │ ├── README.md │ ├── always_exclude_node_modules/ │ │ └── main.ts │ ├── hello.ts │ ├── import_bomb1 │ ├── import_bomb2 │ ├── import_map.json │ └── include_exclude.ts ├── action.yml ├── deno.jsonc ├── deployctl.ts ├── examples/ │ ├── README.md │ ├── fresh/ │ │ ├── README.md │ │ ├── components/ │ │ │ └── Button.tsx │ │ ├── deno.json │ │ ├── dev.ts │ │ ├── fresh.config.ts │ │ ├── fresh.gen.ts │ │ ├── islands/ │ │ │ └── Counter.tsx │ │ ├── main.ts │ │ ├── routes/ │ │ │ ├── _404.tsx │ │ │ ├── _app.tsx │ │ │ ├── api/ │ │ │ │ └── joke.ts │ │ │ ├── greet/ │ │ │ │ └── [name].tsx │ │ │ └── index.tsx │ │ └── twind.config.ts │ ├── hello-world/ │ │ ├── deno.json │ │ └── main.ts │ └── link-shortener/ │ ├── deno.json │ └── main.ts ├── src/ │ ├── args.ts │ ├── config_file.ts │ ├── config_inference.ts │ ├── error.ts │ ├── error_test.ts │ ├── subcommands/ │ │ ├── api.ts │ │ ├── deploy.ts │ │ ├── deployments.ts │ │ ├── logs.ts │ │ ├── logs_test.ts │ │ ├── projects.ts │ │ ├── top.ts │ │ └── upgrade.ts │ ├── utils/ │ │ ├── access_token.ts │ │ ├── api.ts │ │ ├── api_types.ts │ │ ├── crons.ts │ │ ├── entrypoint.ts │ │ ├── env_vars.ts │ │ ├── hashing_encoding.ts │ │ ├── info.ts │ │ ├── manifest.ts │ │ ├── manifest_test.ts │ │ ├── manifest_testdata/ │ │ │ ├── complex/ │ │ │ │ ├── a.txt │ │ │ │ ├── inner1/ │ │ │ │ │ └── b.txt │ │ │ │ └── inner2/ │ │ │ │ └── b.txt │ │ │ ├── single_file/ │ │ │ │ └── a.txt │ │ │ └── two_levels/ │ │ │ ├── a.txt │ │ │ └── inner/ │ │ │ └── b.txt │ │ ├── mod.ts │ │ ├── organization.ts │ │ ├── spinner.ts │ │ ├── time.ts │ │ ├── time_test.ts │ │ ├── token_storage/ │ │ │ ├── 
darwin.ts │ │ │ ├── fs.ts │ │ │ └── memory.ts │ │ └── token_storage.ts │ └── version.ts ├── tests/ │ ├── config_file_test/ │ │ ├── config.json │ │ ├── config_file_test.ts │ │ └── config_with_include.json │ ├── env_vars_test/ │ │ ├── .another-env │ │ ├── .overlapping-env │ │ └── env_vars_test.ts │ ├── help_test.ts │ └── utils.ts └── tools/ ├── bundle.ts └── version_match.ts ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/workflows/ci.yml ================================================ name: ci on: push: branches: [main] pull_request: branches: [main] jobs: test: name: test-${{ matrix.os }}-${{ matrix.deno }} runs-on: ${{ matrix.os }} strategy: matrix: # Test on the oldest supported, the latest stable, and nightly deno: [old, stable, canary] os: [macOS-latest, windows-latest, ubuntu-latest] steps: # Some test cases are sensitive to line endings. Disable autocrlf on # Windows to ensure consistent behavior. - name: Disable autocrlf if: runner.os == 'Windows' run: git config --global core.autocrlf false - name: Setup repo uses: actions/checkout@v3 - name: Setup Deno uses: denoland/setup-deno@v1 with: # Make sure to keep this in sync with the one defined in version.ts. # Also don't forget to update README.md. 
deno-version: ${{ matrix.deno == 'old' && '1.46.0' || (matrix.deno == 'stable' && '2.x' || matrix.deno) }} - run: deno --version - name: Format if: runner.os == 'Linux' && matrix.deno == 'stable' run: deno fmt --check - name: Lint if: runner.os == 'Linux' && matrix.deno == 'stable' run: deno lint - name: Typecheck if: runner.os == 'Linux' && matrix.deno == 'stable' run: deno check deployctl.ts # Skip temporarily (see https://github.com/denoland/deployctl/actions/runs/11500790181/job/32011870448?pr=342#step:8:148) # - name: action/deps.js up-to-date # if: runner.os == 'Linux' && matrix.deno == 'stable' # run: | # # @deno/emit doesn't work if JSR modules are not in the cache. # # This is a workaround to cache the JSR modules beforehand. # deno cache ./src/utils/mod.ts # deno run --allow-read --allow-env --allow-net ./tools/bundle.ts ./src/utils/mod.ts > ./action/latest.deps.js # diff ./action/latest.deps.js ./action/deps.js - name: Run tests # Deno 1.x does not support lockfile v4. To work around this, we append # `--no-lock` in this case. run: deno test -A ${{ matrix.deno == 'old' && '--no-lock' || '' }} tests/ src/ ================================================ FILE: .github/workflows/on-release.yml ================================================ name: Check on release on: release: types: [created] jobs: check-on-release: runs-on: [ubuntu-latest] steps: - name: Setup repo uses: actions/checkout@v3 - name: Setup Deno uses: denoland/setup-deno@v1 - name: check version match run: deno task version-match env: RELEASE_TAG: ${{ github.event.release.tag_name }} ================================================ FILE: .github/workflows/publish.yml ================================================ name: Publish on: push: branches: - main jobs: publish: runs-on: ubuntu-latest permissions: contents: read id-token: write # The OIDC ID token is used for authentication with JSR. 
steps: - uses: actions/checkout@v4 - run: npx jsr publish ================================================ FILE: .github/workflows/test.yml ================================================ name: test on: push: branches: [main] pull_request: branches: [main] jobs: test: name: test-action runs-on: ubuntu-latest permissions: id-token: write contents: read steps: - name: Checkout repository uses: actions/checkout@v3 - name: Deploy to Deno Deploy uses: ./ with: project: happy-rat-64 root: action/tests entrypoint: hello.ts import-map: ./import_map.json - name: Deploy with single include uses: ./ with: project: happy-rat-64 root: action/tests entrypoint: include_exclude.ts include: include_exclude.ts - name: Deploy with comma-separated include uses: ./ with: project: happy-rat-64 root: action entrypoint: tests/include_exclude.ts include: foo, tests/include_exclude.ts,bar - name: Deploy with comma-separated exclude uses: ./ with: project: happy-rat-64 root: action/tests entrypoint: include_exclude.ts exclude: import_bomb1,import_bomb2 - name: Deploy with multiline exclude uses: ./ with: project: happy-rat-64 root: action/tests entrypoint: include_exclude.ts exclude: | import_bomb1 import_bomb2 - name: Deploy combine include and exclude uses: ./ with: project: happy-rat-64 root: action entrypoint: tests/include_exclude.ts include: tests exclude: | tests/import_bomb1 tests/import_bomb2 - name: Always exclude node_modules directory uses: ./ with: project: happy-rat-64 root: action/tests/always_exclude_node_modules entrypoint: main.ts - name: Always exclude nested node_modules directory uses: ./ with: project: happy-rat-64 root: action/tests entrypoint: always_exclude_node_modules/main.ts - name: data URL entrypoint uses: ./ with: project: happy-rat-64 root: action/tests entrypoint: "data:,Deno.serve(() => new Response())" ================================================ FILE: .vscode/settings.json ================================================ { "deno.enable": true, 
"deno.lint": true, "[javascript]": { "editor.defaultFormatter": "denoland.vscode-deno" }, "[javascriptreact]": { "editor.defaultFormatter": "denoland.vscode-deno" }, "[typescript]": { "editor.defaultFormatter": "denoland.vscode-deno" }, "[typescriptreact]": { "editor.defaultFormatter": "denoland.vscode-deno" } } ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2021 Deno Land Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ > [!NOTE] > This tool is only used for managing [Deno Deploy Classic](https://docs.deno.com/deploy/classic/) organizations and their projects. New Deno Deploy organizations use the `deno deploy` command built into the Deno Runtime. 
Learn more about the new `deno deploy` command in the [reference docs](https://docs.deno.com/runtime/reference/cli/deploy/) # deployctl `deployctl` is the command line tool for Deno Deploy. This repository also contains the `denoland/deployctl` GitHub Action. ## Prerequisite You need to have Deno 1.46.0+ installed (latest version is recommended; just run `deno upgrade`) ## Install ```shell deno install -gArf jsr:@deno/deployctl ``` ## Usage The easiest way to get started with `deployctl` is to deploy one of the examples in the [examples directory](./examples): ```shell cd examples/hello-world deployctl deploy ``` Visit the [deployctl docs](https://docs.deno.com/deploy/manual/deployctl) and check out the help output to learn all you can do with deployctl: ```shell deployctl -h ``` ## Action Example ```yml name: Deploy on: push jobs: deploy: runs-on: ubuntu-latest permissions: id-token: write # This is required to allow the GitHub Action to authenticate with Deno Deploy. contents: read steps: - name: Clone repository uses: actions/checkout@v4 - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: project: my-project # the name of the project on Deno Deploy entrypoint: main.ts # the entrypoint to deploy ``` To learn more about the action, checkout [action readme](./action/README.md). ================================================ FILE: action/README.md ================================================ # denoland/deployctl GitHub Actions for deploying to Deno Deploy. > ⚠ If your project does not require a build step, we recommend you use the > ["Automatic" deployment mode](https://docs.deno.com/deploy/manual/ci_github#automatic) > of our GitHub integration. It is faster and requires no setup. 
- [Usage](#usage) - [Permissions](#permissions) - [Inputs](#inputs) - [Examples](#examples) - [Deploy everything](#deploy-everything) - [Deploy a directory](#deploy-a-directory) - [Filter content with `include` and `exclude`](#filter-content-with-include-and-exclude) - [Use external or absolute path as an entrypoint](#use-external-or-absolute-path-as-an-entrypoint) - [Use import map](#use-import-map) ## Usage To deploy you just need to include the Deno Deploy GitHub Action as a step in your workflow. You do **not** need to set up any secrets for this to work. You **do** need to link your GitHub repository to your Deno Deploy project. You have to choose the "GitHub Actions" deployment mode in your project settings on https://dash.deno.com. Read [Deno Deploy documentation](https://docs.deno.com/deploy/manual/ci_github#github-action) for more information. ### Permissions You have to set `id-token: write` permission to authenticate with Deno Deploy. ```yaml jobs: deploy: permissions: id-token: write # required contents: read steps: # your steps here... ``` ### Inputs ```yaml - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: # Name of the project on Deno Deploy # Required. project: # Entrypoint location executed by Deno Deploy # The entrypoint can be a relative path or an absolute URL. # If it is a relative path, it will be resolved relative to the `root` directory. # Required. entrypoint: # Root directory to deploy # All files and subdirectories will be deployed. # Optional. Default is "process.cwd()" root: # Filter which files to include in the deployment # It supports a single file, multiple files separated by a comma or by a newline # Optional. include: # Filter which files to exclude in the deployment # It supports a single file, multiple files separated by a comma or by a newline # Optional. exclude: # Location of an import map # Must be relative to root directory # Optional. 
import-map: ``` ## Examples ### Deploy everything All files and subdirectories in the **working directory** will be deployed. ```yaml - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: project: my-project entrypoint: main.ts ``` ### Deploy a directory All files and subdirectories in the **specified directory** will be deployed. ```yaml - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: project: my-project entrypoint: main.ts # the entrypoint is relative to the root directory (path/to/your/directory/main.ts) root: path/to/your/directory ``` ### Filter content with `include` and `exclude` Use `include` and `exclude` to filter which contents to deploy. ```yaml - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: project: my-project entrypoint: main.ts # the entrypoint must be relative to the root directory include: | main.ts dist exclude: node_modules ``` You can set a single file ```yaml include: main.ts ``` multiple files or directories, separated by a comma ```yaml include: main.ts,dist ``` or separated by a newline ```yaml include: | main.ts dist ``` ### Use external or absolute path as an entrypoint `entrypoint` supports absolute path (`file://`) and external path (`https://`) ```yaml - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: project: my-project entrypoint: https://your-external-path/mod.ts ``` An interesting use case is to directly use [std/http/file_server.ts](https://deno.land/std/http/file_server.ts) as suggested in [Deploy a static site](https://docs.deno.com/deploy/tutorials/static-site) tutorial. ```yaml - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: project: my-project entrypoint: https://deno.land/std/http/file_server.ts ``` ### Use import map You can specify an [import map](https://github.com/WICG/import-maps). 
```yaml - name: Deploy to Deno Deploy uses: denoland/deployctl@v1 with: project: my-project entrypoint: main.ts import-map: path/to/import-map.json ``` ================================================ FILE: action/deps.js ================================================ // deno-fmt-ignore-file // deno-lint-ignore-file // This code was bundled using `deno task build-action` and it's not recommended to edit it manually function assertPath(path) { if (typeof path !== "string") { throw new TypeError(`Path must be a string. Received ${JSON.stringify(path)}`); } } const CHAR_FORWARD_SLASH = 47; function isPathSeparator(code) { return code === 47 || code === 92; } function isWindowsDeviceRoot(code) { return code >= 97 && code <= 122 || code >= 65 && code <= 90; } function assertArg(url) { url = url instanceof URL ? url : new URL(url); if (url.protocol !== "file:") { throw new TypeError("Must be a file URL."); } return url; } function fromFileUrl(url) { url = assertArg(url); let path = decodeURIComponent(url.pathname.replace(/\//g, "\\").replace(/%(?![0-9A-Fa-f]{2})/g, "%25")).replace(/^\\*([A-Za-z]:)(\\|$)/, "$1\\"); if (url.hostname !== "") { path = `\\\\${url.hostname}${path}`; } return path; } function isAbsolute(path) { assertPath(path); const len = path.length; if (len === 0) return false; const code = path.charCodeAt(0); if (isPathSeparator(code)) { return true; } else if (isWindowsDeviceRoot(code)) { if (len > 2 && path.charCodeAt(1) === 58) { if (isPathSeparator(path.charCodeAt(2))) return true; } } return false; } class AssertionError extends Error { constructor(message){ super(message); this.name = "AssertionError"; } } function assert(expr, msg = "") { if (!expr) { throw new AssertionError(msg); } } function assertArg1(path) { assertPath(path); if (path.length === 0) return "."; } function normalizeString(path, allowAboveRoot, separator, isPathSeparator) { let res = ""; let lastSegmentLength = 0; let lastSlash = -1; let dots = 0; let code; for(let i = 0, len = 
path.length; i <= len; ++i){ if (i < len) code = path.charCodeAt(i); else if (isPathSeparator(code)) break; else code = CHAR_FORWARD_SLASH; if (isPathSeparator(code)) { if (lastSlash === i - 1 || dots === 1) {} else if (lastSlash !== i - 1 && dots === 2) { if (res.length < 2 || lastSegmentLength !== 2 || res.charCodeAt(res.length - 1) !== 46 || res.charCodeAt(res.length - 2) !== 46) { if (res.length > 2) { const lastSlashIndex = res.lastIndexOf(separator); if (lastSlashIndex === -1) { res = ""; lastSegmentLength = 0; } else { res = res.slice(0, lastSlashIndex); lastSegmentLength = res.length - 1 - res.lastIndexOf(separator); } lastSlash = i; dots = 0; continue; } else if (res.length === 2 || res.length === 1) { res = ""; lastSegmentLength = 0; lastSlash = i; dots = 0; continue; } } if (allowAboveRoot) { if (res.length > 0) res += `${separator}..`; else res = ".."; lastSegmentLength = 2; } } else { if (res.length > 0) res += separator + path.slice(lastSlash + 1, i); else res = path.slice(lastSlash + 1, i); lastSegmentLength = i - lastSlash - 1; } lastSlash = i; dots = 0; } else if (code === 46 && dots !== -1) { ++dots; } else { dots = -1; } } return res; } function normalize(path) { assertArg1(path); const len = path.length; let rootEnd = 0; let device; let isAbsolute = false; const code = path.charCodeAt(0); if (len > 1) { if (isPathSeparator(code)) { isAbsolute = true; if (isPathSeparator(path.charCodeAt(1))) { let j = 2; let last = j; for(; j < len; ++j){ if (isPathSeparator(path.charCodeAt(j))) break; } if (j < len && j !== last) { const firstPart = path.slice(last, j); last = j; for(; j < len; ++j){ if (!isPathSeparator(path.charCodeAt(j))) break; } if (j < len && j !== last) { last = j; for(; j < len; ++j){ if (isPathSeparator(path.charCodeAt(j))) break; } if (j === len) { return `\\\\${firstPart}\\${path.slice(last)}\\`; } else if (j !== last) { device = `\\\\${firstPart}\\${path.slice(last, j)}`; rootEnd = j; } } } } else { rootEnd = 1; } } else if 
(isWindowsDeviceRoot(code)) { if (path.charCodeAt(1) === 58) { device = path.slice(0, 2); rootEnd = 2; if (len > 2) { if (isPathSeparator(path.charCodeAt(2))) { isAbsolute = true; rootEnd = 3; } } } } } else if (isPathSeparator(code)) { return "\\"; } let tail; if (rootEnd < len) { tail = normalizeString(path.slice(rootEnd), !isAbsolute, "\\", isPathSeparator); } else { tail = ""; } if (tail.length === 0 && !isAbsolute) tail = "."; if (tail.length > 0 && isPathSeparator(path.charCodeAt(len - 1))) { tail += "\\"; } if (device === undefined) { if (isAbsolute) { if (tail.length > 0) return `\\${tail}`; else return "\\"; } else if (tail.length > 0) { return tail; } else { return ""; } } else if (isAbsolute) { if (tail.length > 0) return `${device}\\${tail}`; else return `${device}\\`; } else if (tail.length > 0) { return device + tail; } else { return device; } } function join(...paths) { if (paths.length === 0) return "."; let joined; let firstPart = null; for(let i = 0; i < paths.length; ++i){ const path = paths[i]; assertPath(path); if (path.length > 0) { if (joined === undefined) joined = firstPart = path; else joined += `\\${path}`; } } if (joined === undefined) return "."; let needsReplace = true; let slashCount = 0; assert(firstPart !== null); if (isPathSeparator(firstPart.charCodeAt(0))) { ++slashCount; const firstLen = firstPart.length; if (firstLen > 1) { if (isPathSeparator(firstPart.charCodeAt(1))) { ++slashCount; if (firstLen > 2) { if (isPathSeparator(firstPart.charCodeAt(2))) ++slashCount; else { needsReplace = false; } } } } } if (needsReplace) { for(; slashCount < joined.length; ++slashCount){ if (!isPathSeparator(joined.charCodeAt(slashCount))) break; } if (slashCount >= 2) joined = `\\${joined.slice(slashCount)}`; } return normalize(joined); } function resolve(...pathSegments) { let resolvedDevice = ""; let resolvedTail = ""; let resolvedAbsolute = false; for(let i = pathSegments.length - 1; i >= -1; i--){ let path; const { Deno: Deno1 } = 
globalThis; if (i >= 0) { path = pathSegments[i]; } else if (!resolvedDevice) { if (typeof Deno1?.cwd !== "function") { throw new TypeError("Resolved a drive-letter-less path without a CWD."); } path = Deno1.cwd(); } else { if (typeof Deno1?.env?.get !== "function" || typeof Deno1?.cwd !== "function") { throw new TypeError("Resolved a relative path without a CWD."); } path = Deno1.cwd(); if (path === undefined || path.slice(0, 3).toLowerCase() !== `${resolvedDevice.toLowerCase()}\\`) { path = `${resolvedDevice}\\`; } } assertPath(path); const len = path.length; if (len === 0) continue; let rootEnd = 0; let device = ""; let isAbsolute = false; const code = path.charCodeAt(0); if (len > 1) { if (isPathSeparator(code)) { isAbsolute = true; if (isPathSeparator(path.charCodeAt(1))) { let j = 2; let last = j; for(; j < len; ++j){ if (isPathSeparator(path.charCodeAt(j))) break; } if (j < len && j !== last) { const firstPart = path.slice(last, j); last = j; for(; j < len; ++j){ if (!isPathSeparator(path.charCodeAt(j))) break; } if (j < len && j !== last) { last = j; for(; j < len; ++j){ if (isPathSeparator(path.charCodeAt(j))) break; } if (j === len) { device = `\\\\${firstPart}\\${path.slice(last)}`; rootEnd = j; } else if (j !== last) { device = `\\\\${firstPart}\\${path.slice(last, j)}`; rootEnd = j; } } } } else { rootEnd = 1; } } else if (isWindowsDeviceRoot(code)) { if (path.charCodeAt(1) === 58) { device = path.slice(0, 2); rootEnd = 2; if (len > 2) { if (isPathSeparator(path.charCodeAt(2))) { isAbsolute = true; rootEnd = 3; } } } } } else if (isPathSeparator(code)) { rootEnd = 1; isAbsolute = true; } if (device.length > 0 && resolvedDevice.length > 0 && device.toLowerCase() !== resolvedDevice.toLowerCase()) { continue; } if (resolvedDevice.length === 0 && device.length > 0) { resolvedDevice = device; } if (!resolvedAbsolute) { resolvedTail = `${path.slice(rootEnd)}\\${resolvedTail}`; resolvedAbsolute = isAbsolute; } if (resolvedAbsolute && resolvedDevice.length > 
0) break; } resolvedTail = normalizeString(resolvedTail, !resolvedAbsolute, "\\", isPathSeparator); return resolvedDevice + (resolvedAbsolute ? "\\" : "") + resolvedTail || "."; } const WHITESPACE_ENCODINGS = { "\u0009": "%09", "\u000A": "%0A", "\u000B": "%0B", "\u000C": "%0C", "\u000D": "%0D", "\u0020": "%20" }; function encodeWhitespace(string) { return string.replaceAll(/[\s]/g, (c)=>{ return WHITESPACE_ENCODINGS[c] ?? c; }); } function toFileUrl(path) { if (!isAbsolute(path)) { throw new TypeError("Must be an absolute path."); } const [, hostname, pathname] = path.match(/^(?:[/\\]{2}([^/\\]+)(?=[/\\](?:[^/\\]|$)))?(.*)/); const url = new URL("file:///"); url.pathname = encodeWhitespace(pathname.replace(/%/g, "%25")); if (hostname !== undefined && hostname !== "localhost") { url.hostname = hostname; if (!url.hostname) { throw new TypeError("Invalid hostname."); } } return url; } const regExpEscapeChars = [ "!", "$", "(", ")", "*", "+", ".", "=", "?", "[", "\\", "^", "{", "|" ]; const rangeEscapeChars = [ "-", "\\", "]" ]; function _globToRegExp(c, glob, { extended = true, globstar: globstarOption = true, caseInsensitive = false } = {}) { if (glob === "") { return /(?!)/; } let newLength = glob.length; for(; newLength > 1 && c.seps.includes(glob[newLength - 1]); newLength--); glob = glob.slice(0, newLength); let regExpString = ""; for(let j = 0; j < glob.length;){ let segment = ""; const groupStack = []; let inRange = false; let inEscape = false; let endsWithSep = false; let i = j; for(; i < glob.length && !c.seps.includes(glob[i]); i++){ if (inEscape) { inEscape = false; const escapeChars = inRange ? rangeEscapeChars : regExpEscapeChars; segment += escapeChars.includes(glob[i]) ? 
`\\${glob[i]}` : glob[i]; continue; } if (glob[i] === c.escapePrefix) { inEscape = true; continue; } if (glob[i] === "[") { if (!inRange) { inRange = true; segment += "["; if (glob[i + 1] === "!") { i++; segment += "^"; } else if (glob[i + 1] === "^") { i++; segment += "\\^"; } continue; } else if (glob[i + 1] === ":") { let k = i + 1; let value = ""; while(glob[k + 1] !== undefined && glob[k + 1] !== ":"){ value += glob[k + 1]; k++; } if (glob[k + 1] === ":" && glob[k + 2] === "]") { i = k + 2; if (value === "alnum") segment += "\\dA-Za-z"; else if (value === "alpha") segment += "A-Za-z"; else if (value === "ascii") segment += "\x00-\x7F"; else if (value === "blank") segment += "\t "; else if (value === "cntrl") segment += "\x00-\x1F\x7F"; else if (value === "digit") segment += "\\d"; else if (value === "graph") segment += "\x21-\x7E"; else if (value === "lower") segment += "a-z"; else if (value === "print") segment += "\x20-\x7E"; else if (value === "punct") { segment += "!\"#$%&'()*+,\\-./:;<=>?@[\\\\\\]^_‘{|}~"; } else if (value === "space") segment += "\\s\v"; else if (value === "upper") segment += "A-Z"; else if (value === "word") segment += "\\w"; else if (value === "xdigit") segment += "\\dA-Fa-f"; continue; } } } if (glob[i] === "]" && inRange) { inRange = false; segment += "]"; continue; } if (inRange) { if (glob[i] === "\\") { segment += `\\\\`; } else { segment += glob[i]; } continue; } if (glob[i] === ")" && groupStack.length > 0 && groupStack[groupStack.length - 1] !== "BRACE") { segment += ")"; const type = groupStack.pop(); if (type === "!") { segment += c.wildcard; } else if (type !== "@") { segment += type; } continue; } if (glob[i] === "|" && groupStack.length > 0 && groupStack[groupStack.length - 1] !== "BRACE") { segment += "|"; continue; } if (glob[i] === "+" && extended && glob[i + 1] === "(") { i++; groupStack.push("+"); segment += "(?:"; continue; } if (glob[i] === "@" && extended && glob[i + 1] === "(") { i++; groupStack.push("@"); segment 
+= "(?:"; continue; } if (glob[i] === "?") { if (extended && glob[i + 1] === "(") { i++; groupStack.push("?"); segment += "(?:"; } else { segment += "."; } continue; } if (glob[i] === "!" && extended && glob[i + 1] === "(") { i++; groupStack.push("!"); segment += "(?!"; continue; } if (glob[i] === "{") { groupStack.push("BRACE"); segment += "(?:"; continue; } if (glob[i] === "}" && groupStack[groupStack.length - 1] === "BRACE") { groupStack.pop(); segment += ")"; continue; } if (glob[i] === "," && groupStack[groupStack.length - 1] === "BRACE") { segment += "|"; continue; } if (glob[i] === "*") { if (extended && glob[i + 1] === "(") { i++; groupStack.push("*"); segment += "(?:"; } else { const prevChar = glob[i - 1]; let numStars = 1; while(glob[i + 1] === "*"){ i++; numStars++; } const nextChar = glob[i + 1]; if (globstarOption && numStars === 2 && [ ...c.seps, undefined ].includes(prevChar) && [ ...c.seps, undefined ].includes(nextChar)) { segment += c.globstar; endsWithSep = true; } else { segment += c.wildcard; } } continue; } segment += regExpEscapeChars.includes(glob[i]) ? `\\${glob[i]}` : glob[i]; } if (groupStack.length > 0 || inRange || inEscape) { segment = ""; for (const c of glob.slice(j, i)){ segment += regExpEscapeChars.includes(c) ? `\\${c}` : c; endsWithSep = false; } } regExpString += segment; if (!endsWithSep) { regExpString += i < glob.length ? c.sep : c.sepMaybe; endsWithSep = true; } while(c.seps.includes(glob[i]))i++; if (!(i > j)) { throw new Error("Assertion failure: i > j (potential infinite loop)"); } j = i; } regExpString = `^${regExpString}$`; return new RegExp(regExpString, caseInsensitive ? 
"i" : ""); } const constants = { sep: "(?:\\\\|/)+", sepMaybe: "(?:\\\\|/)*", seps: [ "\\", "/" ], globstar: "(?:[^\\\\/]*(?:\\\\|/|$)+)*", wildcard: "[^\\\\/]*", escapePrefix: "`" }; function globToRegExp(glob, options = {}) { return _globToRegExp(constants, glob, options); } function isGlob(str) { const chars = { "{": "}", "(": ")", "[": "]" }; const regex = /\\(.)|(^!|\*|\?|[\].+)]\?|\[[^\\\]]+\]|\{[^\\}]+\}|\(\?[:!=][^\\)]+\)|\([^|]+\|[^\\)]+\))/; if (str === "") { return false; } let match; while(match = regex.exec(str)){ if (match[2]) return true; let idx = match.index + match[0].length; const open = match[1]; const close = open ? chars[open] : null; if (open && close) { const n = str.indexOf(close, idx); if (n !== -1) { idx = n + 1; } } str = str.slice(idx); } return false; } function isPosixPathSeparator(code) { return code === 47; } function fromFileUrl1(url) { url = assertArg(url); return decodeURIComponent(url.pathname.replace(/%(?![0-9A-Fa-f]{2})/g, "%25")); } function isAbsolute1(path) { assertPath(path); return path.length > 0 && isPosixPathSeparator(path.charCodeAt(0)); } function normalize1(path) { assertArg1(path); const isAbsolute = isPosixPathSeparator(path.charCodeAt(0)); const trailingSeparator = isPosixPathSeparator(path.charCodeAt(path.length - 1)); path = normalizeString(path, !isAbsolute, "/", isPosixPathSeparator); if (path.length === 0 && !isAbsolute) path = "."; if (path.length > 0 && trailingSeparator) path += "/"; if (isAbsolute) return `/${path}`; return path; } function join1(...paths) { if (paths.length === 0) return "."; let joined; for(let i = 0, len = paths.length; i < len; ++i){ const path = paths[i]; assertPath(path); if (path.length > 0) { if (!joined) joined = path; else joined += `/${path}`; } } if (!joined) return "."; return normalize1(joined); } function resolve1(...pathSegments) { let resolvedPath = ""; let resolvedAbsolute = false; for(let i = pathSegments.length - 1; i >= -1 && !resolvedAbsolute; i--){ let path; if (i 
>= 0) path = pathSegments[i]; else { const { Deno: Deno1 } = globalThis; if (typeof Deno1?.cwd !== "function") { throw new TypeError("Resolved a relative path without a CWD."); } path = Deno1.cwd(); } assertPath(path); if (path.length === 0) { continue; } resolvedPath = `${path}/${resolvedPath}`; resolvedAbsolute = isPosixPathSeparator(path.charCodeAt(0)); } resolvedPath = normalizeString(resolvedPath, !resolvedAbsolute, "/", isPosixPathSeparator); if (resolvedAbsolute) { if (resolvedPath.length > 0) return `/${resolvedPath}`; else return "/"; } else if (resolvedPath.length > 0) return resolvedPath; else return "."; } function toFileUrl1(path) { if (!isAbsolute1(path)) { throw new TypeError("Must be an absolute path."); } const url = new URL("file:///"); url.pathname = encodeWhitespace(path.replace(/%/g, "%25").replace(/\\/g, "%5C")); return url; } const constants1 = { sep: "/+", sepMaybe: "/*", seps: [ "/" ], globstar: "(?:[^/]*(?:/|$)+)*", wildcard: "[^/]*", escapePrefix: "\\" }; function globToRegExp1(glob, options = {}) { return _globToRegExp(constants1, glob, options); } const osType = (()=>{ const { Deno: Deno1 } = globalThis; if (typeof Deno1?.build?.os === "string") { return Deno1.build.os; } const { navigator } = globalThis; if (navigator?.appVersion?.includes?.("Win")) { return "windows"; } return "linux"; })(); const isWindows = osType === "windows"; function fromFileUrl2(url) { return isWindows ? fromFileUrl(url) : fromFileUrl1(url); } function join2(...paths) { return isWindows ? join(...paths) : join1(...paths); } function normalize2(path) { return isWindows ? normalize(path) : normalize1(path); } function resolve2(...pathSegments) { return isWindows ? resolve(...pathSegments) : resolve1(...pathSegments); } function toFileUrl2(path) { return isWindows ? toFileUrl(path) : toFileUrl1(path); } function globToRegExp2(glob, options = {}) { return options.os === "windows" || !options.os && isWindows ? 
globToRegExp(glob, options) : globToRegExp1(glob, options); } const { Deno: Deno1 } = globalThis; typeof Deno1?.noColor === "boolean" ? Deno1.noColor : false; new RegExp([ "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TXZcf-nq-uy=><~]))" ].join("|"), "g"); const DEFAULT_STRINGIFY_OPTIONS = { verbose: false }; function stringify(err, options) { const opts = options === undefined ? DEFAULT_STRINGIFY_OPTIONS : { ...DEFAULT_STRINGIFY_OPTIONS, ...options }; if (err instanceof Error) { if (opts.verbose) { return stringifyErrorLong(err); } else { return stringifyErrorShort(err); } } if (typeof err === "string") { return err; } return JSON.stringify(err); } function stringifyErrorShort(err) { return `${err.name}: ${err.message}`; } function stringifyErrorLong(err) { const cause = err.cause === undefined ? "" : `\nCaused by ${stringify(err.cause, { verbose: true })}`; if (!err.stack) { return `${err.name}: ${err.message}${cause}`; } return `${err.stack}${cause}`; } async function parseEntrypoint(entrypoint, root, diagnosticName = "entrypoint") { let entrypointSpecifier; try { if (isURL(entrypoint)) { entrypointSpecifier = new URL(entrypoint); } else { entrypointSpecifier = toFileUrl2(resolve2(root ?? 
Deno.cwd(), entrypoint)); } } catch (err) { throw `Failed to parse ${diagnosticName} specifier '${entrypoint}': ${stringify(err)}`; } if (entrypointSpecifier.protocol === "file:") { try { await Deno.lstat(entrypointSpecifier); } catch (err) { throw `Failed to open ${diagnosticName} file at '${entrypointSpecifier}': ${stringify(err)}`; } } return entrypointSpecifier; }
// True when the entrypoint is already a URL-like specifier (http/https/file/
// data/jsr/npm) and must not be resolved as a filesystem path.
function isURL(entrypoint) { return entrypoint.startsWith("https://") || entrypoint.startsWith("http://") || entrypoint.startsWith("file://") || entrypoint.startsWith("data:") || entrypoint.startsWith("jsr:") || entrypoint.startsWith("npm:"); }
/**
 * std/async-style delay: resolves after `ms` milliseconds.
 * Rejects with signal.reason immediately if `signal` is already aborted, or
 * later if it fires before the timer does (the pending timeout is cleared).
 * `persistent: false` unrefs the timer so it does not keep the Deno event
 * loop alive; outside Deno that is reported and ignored.
 */
function delay(ms, options = {}) { const { signal, persistent } = options; if (signal?.aborted) return Promise.reject(signal.reason); return new Promise((resolve, reject)=>{ const abort = ()=>{ clearTimeout(i); reject(signal?.reason); }; const done = ()=>{ signal?.removeEventListener("abort", abort); resolve(); }; const i = setTimeout(done, ms); signal?.addEventListener("abort", abort, { once: true }); if (persistent === false) { try { Deno.unrefTimer(i); } catch (error) { if (!(error instanceof ReferenceError)) { throw error; } console.error("`persistent` option is only available in Deno"); } } }); }
/**
 * TransformStream splitting incoming text chunks into lines, buffering any
 * trailing partial line across chunks (#currentLine). A "\r" immediately
 * before "\n" is stripped from the emitted line; with allowCR, a lone "\r"
 * also terminates a line.
 */
class TextLineStream extends TransformStream { #currentLine = ""; constructor(options = { allowCR: false }){ super({ transform: (chars, controller)=>{ chars = this.#currentLine + chars; while(true){ const lfIndex = chars.indexOf("\n"); const crIndex = options.allowCR ? chars.indexOf("\r") : -1; if (crIndex !== -1 && crIndex !== chars.length - 1 && (lfIndex === -1 || lfIndex - 1 > crIndex)) { controller.enqueue(chars.slice(0, crIndex)); chars = chars.slice(crIndex + 1); continue; } if (lfIndex === -1) break; const endIndex = chars[lfIndex - 1] === "\r" ?
lfIndex - 1 : lfIndex; controller.enqueue(chars.slice(0, endIndex)); chars = chars.slice(lfIndex + 1); } this.#currentLine = chars; }, flush: (controller)=>{ if (this.#currentLine === "") return; const currentLine = options.allowCR && this.#currentLine.endsWith("\r") ? this.#currentLine.slice(0, -1) : this.#currentLine; controller.enqueue(currentLine); } }); } } const VERSION = "1.13.0"; const { Deno: Deno2 } = globalThis; const noColor = typeof Deno2?.noColor === "boolean" ? Deno2.noColor : false; let enabled = !noColor; function code(open, close) { return { open: `\x1b[${open.join(";")}m`, close: `\x1b[${close}m`, regexp: new RegExp(`\\x1b\\[${close}m`, "g") }; } function run(str, code) { return enabled ? `${code.open}${str.replace(code.regexp, code.open)}${code.close}` : str; } function black(str) { return run(str, code([ 30 ], 39)); } function red(str) { return run(str, code([ 31 ], 39)); } function green(str) { return run(str, code([ 32 ], 39)); } function yellow(str) { return run(str, code([ 33 ], 39)); } function blue(str) { return run(str, code([ 34 ], 39)); } function magenta(str) { return run(str, code([ 35 ], 39)); } function cyan(str) { return run(str, code([ 36 ], 39)); } function white(str) { return run(str, code([ 37 ], 39)); } function gray(str) { return brightBlack(str); } function brightBlack(str) { return run(str, code([ 90 ], 39)); } new RegExp([ "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TXZcf-nq-uy=><~]))" ].join("|"), "g"); const encoder = new TextEncoder(); function encode(input) { return encoder.encode(input); } const __default = [ [ 0x0300, 0x036f ], [ 0x0483, 0x0486 ], [ 0x0488, 0x0489 ], [ 0x0591, 0x05bd ], [ 0x05bf, 0x05bf ], [ 0x05c1, 0x05c2 ], [ 0x05c4, 0x05c5 ], [ 0x05c7, 0x05c7 ], [ 0x0600, 0x0603 ], [ 0x0610, 0x0615 ], [ 0x064b, 0x065e ], [ 0x0670, 0x0670 ], [ 0x06d6, 0x06e4 ], [ 0x06e7, 0x06e8 ], [ 0x06ea, 
0x06ed ], [ 0x070f, 0x070f ], [ 0x0711, 0x0711 ], [ 0x0730, 0x074a ], [ 0x07a6, 0x07b0 ], [ 0x07eb, 0x07f3 ], [ 0x0901, 0x0902 ], [ 0x093c, 0x093c ], [ 0x0941, 0x0948 ], [ 0x094d, 0x094d ], [ 0x0951, 0x0954 ], [ 0x0962, 0x0963 ], [ 0x0981, 0x0981 ], [ 0x09bc, 0x09bc ], [ 0x09c1, 0x09c4 ], [ 0x09cd, 0x09cd ], [ 0x09e2, 0x09e3 ], [ 0x0a01, 0x0a02 ], [ 0x0a3c, 0x0a3c ], [ 0x0a41, 0x0a42 ], [ 0x0a47, 0x0a48 ], [ 0x0a4b, 0x0a4d ], [ 0x0a70, 0x0a71 ], [ 0x0a81, 0x0a82 ], [ 0x0abc, 0x0abc ], [ 0x0ac1, 0x0ac5 ], [ 0x0ac7, 0x0ac8 ], [ 0x0acd, 0x0acd ], [ 0x0ae2, 0x0ae3 ], [ 0x0b01, 0x0b01 ], [ 0x0b3c, 0x0b3c ], [ 0x0b3f, 0x0b3f ], [ 0x0b41, 0x0b43 ], [ 0x0b4d, 0x0b4d ], [ 0x0b56, 0x0b56 ], [ 0x0b82, 0x0b82 ], [ 0x0bc0, 0x0bc0 ], [ 0x0bcd, 0x0bcd ], [ 0x0c3e, 0x0c40 ], [ 0x0c46, 0x0c48 ], [ 0x0c4a, 0x0c4d ], [ 0x0c55, 0x0c56 ], [ 0x0cbc, 0x0cbc ], [ 0x0cbf, 0x0cbf ], [ 0x0cc6, 0x0cc6 ], [ 0x0ccc, 0x0ccd ], [ 0x0ce2, 0x0ce3 ], [ 0x0d41, 0x0d43 ], [ 0x0d4d, 0x0d4d ], [ 0x0dca, 0x0dca ], [ 0x0dd2, 0x0dd4 ], [ 0x0dd6, 0x0dd6 ], [ 0x0e31, 0x0e31 ], [ 0x0e34, 0x0e3a ], [ 0x0e47, 0x0e4e ], [ 0x0eb1, 0x0eb1 ], [ 0x0eb4, 0x0eb9 ], [ 0x0ebb, 0x0ebc ], [ 0x0ec8, 0x0ecd ], [ 0x0f18, 0x0f19 ], [ 0x0f35, 0x0f35 ], [ 0x0f37, 0x0f37 ], [ 0x0f39, 0x0f39 ], [ 0x0f71, 0x0f7e ], [ 0x0f80, 0x0f84 ], [ 0x0f86, 0x0f87 ], [ 0x0f90, 0x0f97 ], [ 0x0f99, 0x0fbc ], [ 0x0fc6, 0x0fc6 ], [ 0x102d, 0x1030 ], [ 0x1032, 0x1032 ], [ 0x1036, 0x1037 ], [ 0x1039, 0x1039 ], [ 0x1058, 0x1059 ], [ 0x1160, 0x11ff ], [ 0x135f, 0x135f ], [ 0x1712, 0x1714 ], [ 0x1732, 0x1734 ], [ 0x1752, 0x1753 ], [ 0x1772, 0x1773 ], [ 0x17b4, 0x17b5 ], [ 0x17b7, 0x17bd ], [ 0x17c6, 0x17c6 ], [ 0x17c9, 0x17d3 ], [ 0x17dd, 0x17dd ], [ 0x180b, 0x180d ], [ 0x18a9, 0x18a9 ], [ 0x1920, 0x1922 ], [ 0x1927, 0x1928 ], [ 0x1932, 0x1932 ], [ 0x1939, 0x193b ], [ 0x1a17, 0x1a18 ], [ 0x1b00, 0x1b03 ], [ 0x1b34, 0x1b34 ], [ 0x1b36, 0x1b3a ], [ 0x1b3c, 0x1b3c ], [ 0x1b42, 0x1b42 ], [ 0x1b6b, 0x1b73 ], [ 0x1dc0, 0x1dca ], [ 0x1dfe, 0x1dff ], [ 0x200b, 
0x200f ], [ 0x202a, 0x202e ], [ 0x2060, 0x2063 ], [ 0x206a, 0x206f ], [ 0x20d0, 0x20ef ], [ 0x302a, 0x302f ], [ 0x3099, 0x309a ], [ 0xa806, 0xa806 ], [ 0xa80b, 0xa80b ], [ 0xa825, 0xa826 ], [ 0xfb1e, 0xfb1e ], [ 0xfe00, 0xfe0f ], [ 0xfe20, 0xfe23 ], [ 0xfeff, 0xfeff ], [ 0xfff9, 0xfffb ], [ 0x10a01, 0x10a03 ], [ 0x10a05, 0x10a06 ], [ 0x10a0c, 0x10a0f ], [ 0x10a38, 0x10a3a ], [ 0x10a3f, 0x10a3f ], [ 0x1d167, 0x1d169 ], [ 0x1d173, 0x1d182 ], [ 0x1d185, 0x1d18b ], [ 0x1d1aa, 0x1d1ad ], [ 0x1d242, 0x1d244 ], [ 0xe0001, 0xe0001 ], [ 0xe0020, 0xe007f ], [ 0xe0100, 0xe01ef ] ]; function wcswidth(str, { nul = 0, control = 0 } = {}) { const opts = { nul, control }; if (typeof str !== "string") return wcwidth(str, opts); let s = 0; for(let i = 0; i < str.length; i++){ const n = wcwidth(str.charCodeAt(i), opts); if (n < 0) return -1; s += n; } return s; } function wcwidth(ucs, { nul = 0, control = 0 } = {}) { if (ucs === 0) return nul; if (ucs < 32 || ucs >= 0x7f && ucs < 0xa0) return control; if (bisearch(ucs)) return 0; return 1 + (ucs >= 0x1100 && (ucs <= 0x115f || ucs == 0x2329 || ucs == 0x232a || ucs >= 0x2e80 && ucs <= 0xa4cf && ucs != 0x303f || ucs >= 0xac00 && ucs <= 0xd7a3 || ucs >= 0xf900 && ucs <= 0xfaff || ucs >= 0xfe10 && ucs <= 0xfe19 || ucs >= 0xfe30 && ucs <= 0xfe6f || ucs >= 0xff00 && ucs <= 0xff60 || ucs >= 0xffe0 && ucs <= 0xffe6 || ucs >= 0x20000 && ucs <= 0x2fffd || ucs >= 0x30000 && ucs <= 0x3fffd) ? 
1 : 0); } function bisearch(ucs) { let min = 0; let max = __default.length - 1; let mid; if (ucs < __default[0][0] || ucs > __default[max][1]) return false; while(max >= min){ mid = Math.floor((min + max) / 2); if (ucs > __default[mid][1]) min = mid + 1; else if (ucs < __default[mid][0]) max = mid - 1; else return true; } return false; } function ansiRegex({ onlyFirst = false } = {}) { const pattern = [ "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))" ].join("|"); return new RegExp(pattern, onlyFirst ? undefined : "g"); } function isInteractive(stream) { return stream.isTerminal(); } (await Deno.permissions.query({ name: "env" })).state === "granted" ? Deno.env.get("TERM_PROGRAM") === "Apple_Terminal" : false; function writeSync(str, writer) { writer.writeSync(encode(str)); } function stripAnsi(dirty) { return dirty.replace(ansiRegex(), ""); } const ESC = "\u001B["; const HIDE = "?25l"; const SHOW = "?25h"; const UP = "A"; const RIGHT = "C"; const CLEAR_LINE = "2K"; function cursorSync(action, writer = Deno.stdout) { writeSync(ESC + action, writer); } function hideCursorSync(writer = Deno.stdout) { cursorSync(HIDE, writer); } function showCursorSync(writer = Deno.stdout) { cursorSync(SHOW, writer); } function clearLineSync(writer = Deno.stdout) { cursorSync(CLEAR_LINE, writer); } function goUpSync(y = 1, writer = Deno.stdout) { cursorSync(y + UP, writer); } function goRightSync(x = 1, writer = Deno.stdout) { cursorSync(`${x}${RIGHT}`, writer); } const __default1 = { dots: { interval: 80, frames: [ "⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏" ] }, dots2: { interval: 80, frames: [ "⣾", "⣽", "⣻", "⢿", "⡿", "⣟", "⣯", "⣷" ] }, dots3: { interval: 80, frames: [ "⠋", "⠙", "⠚", "⠞", "⠖", "⠦", "⠴", "⠲", "⠳", "⠓" ] }, dots4: { interval: 80, frames: [ "⠄", "⠆", "⠇", "⠋", "⠙", "⠸", "⠰", "⠠", "⠰", "⠸", "⠙", "⠋", "⠇", "⠆" ] }, dots5: { interval: 80, frames: [ "⠋", "⠙", 
"⠚", "⠒", "⠂", "⠂", "⠒", "⠲", "⠴", "⠦", "⠖", "⠒", "⠐", "⠐", "⠒", "⠓", "⠋" ] }, dots6: { interval: 80, frames: [ "⠁", "⠉", "⠙", "⠚", "⠒", "⠂", "⠂", "⠒", "⠲", "⠴", "⠤", "⠄", "⠄", "⠤", "⠴", "⠲", "⠒", "⠂", "⠂", "⠒", "⠚", "⠙", "⠉", "⠁" ] }, dots7: { interval: 80, frames: [ "⠈", "⠉", "⠋", "⠓", "⠒", "⠐", "⠐", "⠒", "⠖", "⠦", "⠤", "⠠", "⠠", "⠤", "⠦", "⠖", "⠒", "⠐", "⠐", "⠒", "⠓", "⠋", "⠉", "⠈" ] }, dots8: { interval: 80, frames: [ "⠁", "⠁", "⠉", "⠙", "⠚", "⠒", "⠂", "⠂", "⠒", "⠲", "⠴", "⠤", "⠄", "⠄", "⠤", "⠠", "⠠", "⠤", "⠦", "⠖", "⠒", "⠐", "⠐", "⠒", "⠓", "⠋", "⠉", "⠈", "⠈" ] }, dots9: { interval: 80, frames: [ "⢹", "⢺", "⢼", "⣸", "⣇", "⡧", "⡗", "⡏" ] }, dots10: { interval: 80, frames: [ "⢄", "⢂", "⢁", "⡁", "⡈", "⡐", "⡠" ] }, dots11: { interval: 100, frames: [ "⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈" ] }, dots12: { interval: 80, frames: [ "⢀⠀", "⡀⠀", "⠄⠀", "⢂⠀", "⡂⠀", "⠅⠀", "⢃⠀", "⡃⠀", "⠍⠀", "⢋⠀", "⡋⠀", "⠍⠁", "⢋⠁", "⡋⠁", "⠍⠉", "⠋⠉", "⠋⠉", "⠉⠙", "⠉⠙", "⠉⠩", "⠈⢙", "⠈⡙", "⢈⠩", "⡀⢙", "⠄⡙", "⢂⠩", "⡂⢘", "⠅⡘", "⢃⠨", "⡃⢐", "⠍⡐", "⢋⠠", "⡋⢀", "⠍⡁", "⢋⠁", "⡋⠁", "⠍⠉", "⠋⠉", "⠋⠉", "⠉⠙", "⠉⠙", "⠉⠩", "⠈⢙", "⠈⡙", "⠈⠩", "⠀⢙", "⠀⡙", "⠀⠩", "⠀⢘", "⠀⡘", "⠀⠨", "⠀⢐", "⠀⡐", "⠀⠠", "⠀⢀", "⠀⡀" ] }, dots8Bit: { interval: 80, frames: [ "⠀", "⠁", "⠂", "⠃", "⠄", "⠅", "⠆", "⠇", "⡀", "⡁", "⡂", "⡃", "⡄", "⡅", "⡆", "⡇", "⠈", "⠉", "⠊", "⠋", "⠌", "⠍", "⠎", "⠏", "⡈", "⡉", "⡊", "⡋", "⡌", "⡍", "⡎", "⡏", "⠐", "⠑", "⠒", "⠓", "⠔", "⠕", "⠖", "⠗", "⡐", "⡑", "⡒", "⡓", "⡔", "⡕", "⡖", "⡗", "⠘", "⠙", "⠚", "⠛", "⠜", "⠝", "⠞", "⠟", "⡘", "⡙", "⡚", "⡛", "⡜", "⡝", "⡞", "⡟", "⠠", "⠡", "⠢", "⠣", "⠤", "⠥", "⠦", "⠧", "⡠", "⡡", "⡢", "⡣", "⡤", "⡥", "⡦", "⡧", "⠨", "⠩", "⠪", "⠫", "⠬", "⠭", "⠮", "⠯", "⡨", "⡩", "⡪", "⡫", "⡬", "⡭", "⡮", "⡯", "⠰", "⠱", "⠲", "⠳", "⠴", "⠵", "⠶", "⠷", "⡰", "⡱", "⡲", "⡳", "⡴", "⡵", "⡶", "⡷", "⠸", "⠹", "⠺", "⠻", "⠼", "⠽", "⠾", "⠿", "⡸", "⡹", "⡺", "⡻", "⡼", "⡽", "⡾", "⡿", "⢀", "⢁", "⢂", "⢃", "⢄", "⢅", "⢆", "⢇", "⣀", "⣁", "⣂", "⣃", "⣄", "⣅", "⣆", "⣇", "⢈", "⢉", "⢊", "⢋", "⢌", "⢍", "⢎", "⢏", "⣈", "⣉", "⣊", "⣋", "⣌", 
"⣍", "⣎", "⣏", "⢐", "⢑", "⢒", "⢓", "⢔", "⢕", "⢖", "⢗", "⣐", "⣑", "⣒", "⣓", "⣔", "⣕", "⣖", "⣗", "⢘", "⢙", "⢚", "⢛", "⢜", "⢝", "⢞", "⢟", "⣘", "⣙", "⣚", "⣛", "⣜", "⣝", "⣞", "⣟", "⢠", "⢡", "⢢", "⢣", "⢤", "⢥", "⢦", "⢧", "⣠", "⣡", "⣢", "⣣", "⣤", "⣥", "⣦", "⣧", "⢨", "⢩", "⢪", "⢫", "⢬", "⢭", "⢮", "⢯", "⣨", "⣩", "⣪", "⣫", "⣬", "⣭", "⣮", "⣯", "⢰", "⢱", "⢲", "⢳", "⢴", "⢵", "⢶", "⢷", "⣰", "⣱", "⣲", "⣳", "⣴", "⣵", "⣶", "⣷", "⢸", "⢹", "⢺", "⢻", "⢼", "⢽", "⢾", "⢿", "⣸", "⣹", "⣺", "⣻", "⣼", "⣽", "⣾", "⣿" ] }, line: { interval: 130, frames: [ "-", "\\", "|", "/" ] }, line2: { interval: 100, frames: [ "⠂", "-", "–", "—", "–", "-" ] }, pipe: { interval: 100, frames: [ "┤", "┘", "┴", "└", "├", "┌", "┬", "┐" ] }, simpleDots: { interval: 400, frames: [ ". ", ".. ", "...", " " ] }, simpleDotsScrolling: { interval: 200, frames: [ ". ", ".. ", "...", " ..", " .", " " ] }, star: { interval: 70, frames: [ "✶", "✸", "✹", "✺", "✹", "✷" ] }, star2: { interval: 80, frames: [ "+", "x", "*" ] }, flip: { interval: 70, frames: [ "_", "_", "_", "-", "`", "`", "'", "´", "-", "_", "_", "_" ] }, hamburger: { interval: 100, frames: [ "☱", "☲", "☴" ] }, growVertical: { interval: 120, frames: [ "▁", "▃", "▄", "▅", "▆", "▇", "▆", "▅", "▄", "▃" ] }, growHorizontal: { interval: 120, frames: [ "▏", "▎", "▍", "▌", "▋", "▊", "▉", "▊", "▋", "▌", "▍", "▎" ] }, balloon: { interval: 140, frames: [ " ", ".", "o", "O", "@", "*", " " ] }, balloon2: { interval: 120, frames: [ ".", "o", "O", "°", "O", "o", "." 
] }, noise: { interval: 100, frames: [ "▓", "▒", "░" ] }, bounce: { interval: 120, frames: [ "⠁", "⠂", "⠄", "⠂" ] }, boxBounce: { interval: 120, frames: [ "▖", "▘", "▝", "▗" ] }, boxBounce2: { interval: 100, frames: [ "▌", "▀", "▐", "▄" ] }, triangle: { interval: 50, frames: [ "◢", "◣", "◤", "◥" ] }, arc: { interval: 100, frames: [ "◜", "◠", "◝", "◞", "◡", "◟" ] }, circle: { interval: 120, frames: [ "◡", "⊙", "◠" ] }, squareCorners: { interval: 180, frames: [ "◰", "◳", "◲", "◱" ] }, circleQuarters: { interval: 120, frames: [ "◴", "◷", "◶", "◵" ] }, circleHalves: { interval: 50, frames: [ "◐", "◓", "◑", "◒" ] }, squish: { interval: 100, frames: [ "╫", "╪" ] }, toggle: { interval: 250, frames: [ "⊶", "⊷" ] }, toggle2: { interval: 80, frames: [ "▫", "▪" ] }, toggle3: { interval: 120, frames: [ "□", "■" ] }, toggle4: { interval: 100, frames: [ "■", "□", "▪", "▫" ] }, toggle5: { interval: 100, frames: [ "▮", "▯" ] }, toggle6: { interval: 300, frames: [ "ဝ", "၀" ] }, toggle7: { interval: 80, frames: [ "⦾", "⦿" ] }, toggle8: { interval: 100, frames: [ "◍", "◌" ] }, toggle9: { interval: 100, frames: [ "◉", "◎" ] }, toggle10: { interval: 100, frames: [ "㊂", "㊀", "㊁" ] }, toggle11: { interval: 50, frames: [ "⧇", "⧆" ] }, toggle12: { interval: 120, frames: [ "☗", "☖" ] }, toggle13: { interval: 80, frames: [ "=", "*", "-" ] }, arrow: { interval: 100, frames: [ "←", "↖", "↑", "↗", "→", "↘", "↓", "↙" ] }, arrow2: { interval: 80, frames: [ "⬆️ ", "↗️ ", "➡️ ", "↘️ ", "⬇️ ", "↙️ ", "⬅️ ", "↖️ " ] }, arrow3: { interval: 120, frames: [ "▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸" ] }, bouncingBar: { interval: 80, frames: [ "[ ]", "[= ]", "[== ]", "[=== ]", "[ ===]", "[ ==]", "[ =]", "[ ]", "[ =]", "[ ==]", "[ ===]", "[====]", "[=== ]", "[== ]", "[= ]" ] }, bouncingBall: { interval: 80, frames: [ "( ● )", "( ● )", "( ● )", "( ● )", "( ●)", "( ● )", "( ● )", "( ● )", "( ● )", "(● )" ] }, smiley: { interval: 200, frames: [ "😄 ", "😝 " ] }, monkey: { interval: 300, frames: [ "🙈 
", "🙈 ", "🙉 ", "🙊 " ] }, hearts: { interval: 100, frames: [ "💛 ", "💙 ", "💜 ", "💚 ", "❤️ " ] }, clock: { interval: 100, frames: [ "🕛 ", "🕐 ", "🕑 ", "🕒 ", "🕓 ", "🕔 ", "🕕 ", "🕖 ", "🕗 ", "🕘 ", "🕙 ", "🕚 " ] }, earth: { interval: 180, frames: [ "🌍 ", "🌎 ", "🌏 " ] }, material: { interval: 17, frames: [ "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "███████▁▁▁▁▁▁▁▁▁▁▁▁▁", "████████▁▁▁▁▁▁▁▁▁▁▁▁", "█████████▁▁▁▁▁▁▁▁▁▁▁", "█████████▁▁▁▁▁▁▁▁▁▁▁", "██████████▁▁▁▁▁▁▁▁▁▁", "███████████▁▁▁▁▁▁▁▁▁", "█████████████▁▁▁▁▁▁▁", "██████████████▁▁▁▁▁▁", "██████████████▁▁▁▁▁▁", "▁██████████████▁▁▁▁▁", "▁██████████████▁▁▁▁▁", "▁██████████████▁▁▁▁▁", "▁▁██████████████▁▁▁▁", "▁▁▁██████████████▁▁▁", "▁▁▁▁█████████████▁▁▁", "▁▁▁▁██████████████▁▁", "▁▁▁▁██████████████▁▁", "▁▁▁▁▁██████████████▁", "▁▁▁▁▁██████████████▁", "▁▁▁▁▁██████████████▁", "▁▁▁▁▁▁██████████████", "▁▁▁▁▁▁██████████████", "▁▁▁▁▁▁▁█████████████", "▁▁▁▁▁▁▁█████████████", "▁▁▁▁▁▁▁▁████████████", "▁▁▁▁▁▁▁▁████████████", "▁▁▁▁▁▁▁▁▁███████████", "▁▁▁▁▁▁▁▁▁███████████", "▁▁▁▁▁▁▁▁▁▁██████████", "▁▁▁▁▁▁▁▁▁▁██████████", "▁▁▁▁▁▁▁▁▁▁▁▁████████", "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁██████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", "██████▁▁▁▁▁▁▁▁▁▁▁▁▁█", "████████▁▁▁▁▁▁▁▁▁▁▁▁", "█████████▁▁▁▁▁▁▁▁▁▁▁", "█████████▁▁▁▁▁▁▁▁▁▁▁", "█████████▁▁▁▁▁▁▁▁▁▁▁", "█████████▁▁▁▁▁▁▁▁▁▁▁", "███████████▁▁▁▁▁▁▁▁▁", "████████████▁▁▁▁▁▁▁▁", "████████████▁▁▁▁▁▁▁▁", "██████████████▁▁▁▁▁▁", "██████████████▁▁▁▁▁▁", "▁██████████████▁▁▁▁▁", "▁██████████████▁▁▁▁▁", "▁▁▁█████████████▁▁▁▁", "▁▁▁▁▁████████████▁▁▁", "▁▁▁▁▁████████████▁▁▁", "▁▁▁▁▁▁███████████▁▁▁", "▁▁▁▁▁▁▁▁█████████▁▁▁", "▁▁▁▁▁▁▁▁█████████▁▁▁", "▁▁▁▁▁▁▁▁▁█████████▁▁", "▁▁▁▁▁▁▁▁▁█████████▁▁", "▁▁▁▁▁▁▁▁▁▁█████████▁", 
"▁▁▁▁▁▁▁▁▁▁▁████████▁", "▁▁▁▁▁▁▁▁▁▁▁████████▁", "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁" ] }, moon: { interval: 80, frames: [ "🌑 ", "🌒 ", "🌓 ", "🌔 ", "🌕 ", "🌖 ", "🌗 ", "🌘 " ] }, runner: { interval: 140, frames: [ "🚶 ", "🏃 " ] }, pong: { interval: 80, frames: [ "▐⠂ ▌", "▐⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂▌", "▐ ⠠▌", "▐ ⡀▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐⠠ ▌" ] }, shark: { interval: 120, frames: [ "▐|\\____________▌", "▐_|\\___________▌", "▐__|\\__________▌", "▐___|\\_________▌", "▐____|\\________▌", "▐_____|\\_______▌", "▐______|\\______▌", "▐_______|\\_____▌", "▐________|\\____▌", "▐_________|\\___▌", "▐__________|\\__▌", "▐___________|\\_▌", "▐____________|\\▌", "▐____________/|▌", "▐___________/|_▌", "▐__________/|__▌", "▐_________/|___▌", "▐________/|____▌", "▐_______/|_____▌", "▐______/|______▌", "▐_____/|_______▌", "▐____/|________▌", "▐___/|_________▌", "▐__/|__________▌", "▐_/|___________▌", "▐/|____________▌" ] }, dqpb: { interval: 100, frames: [ "d", "q", "p", "b" ] }, weather: { interval: 100, frames: [ "☀️ ", "☀️ ", "☀️ ", "🌤 ", "⛅️ ", "🌥 ", "☁️ ", "🌧 ", "🌨 ", "🌧 ", "🌨 ", "🌧 ", "🌨 ", "⛈ ", "🌨 ", "🌧 ", "🌨 ", "☁️ ", "🌥 ", "⛅️ ", "🌤 ", "☀️ ", "☀️ " ] }, christmas: { interval: 400, frames: [ "🌲", "🎄" ] }, grenade: { interval: 80, frames: [ "، ", "′ ", " ´ ", " ‾ ", " ⸌", " ⸊", " |", " ⁎", " ⁕", " ෴ ", " ⁓", " ", " ", " " ] }, point: { interval: 125, frames: [ "∙∙∙", 
"●∙∙", "∙●∙", "∙∙●", "∙∙∙" ] }, layer: { interval: 150, frames: [ "-", "=", "≡" ] }, betaWave: { interval: 80, frames: [ "ρββββββ", "βρβββββ", "ββρββββ", "βββρβββ", "ββββρββ", "βββββρβ", "ββββββρ" ] } }; let supported = true; if ((await Deno.permissions.query({ name: "env" })).state === "granted") { supported = supported && (!!Deno.env.get("CI") || Deno.env.get("TERM") === "xterm-256color"); } const main = { info: blue("ℹ"), success: green("✔"), warning: yellow("⚠"), error: red("✖") }; const fallbacks = { info: blue("i"), success: green("√"), warning: yellow("‼"), error: red("×") }; const symbols = supported ? main : fallbacks; const encoder1 = new TextEncoder(); const colormap = { black: black, red: red, green: green, yellow: yellow, blue: blue, magenta: magenta, cyan: cyan, white: white, gray: gray }; function wait(opts) { if (typeof opts === "string") { opts = { text: opts }; } return new Spinner({ text: opts.text, prefix: opts.prefix ?? "", color: opts.color ?? cyan, spinner: opts.spinner ?? "dots", hideCursor: opts.hideCursor ?? true, indent: opts.indent ?? 0, interval: opts.interval ?? 100, stream: opts.stream ?? Deno.stdout, enabled: true, discardStdin: true, interceptConsole: opts.interceptConsole ?? true }); } class Spinner { #opts; isSpinning; #stream; indent; interval; #id = 0; #enabled; #frameIndex; #linesToClear; #linesCount; constructor(opts){ this.#opts = opts; this.#stream = this.#opts.stream; this.text = this.#opts.text; this.prefix = this.#opts.prefix; this.color = this.#opts.color; this.spinner = this.#opts.spinner; this.indent = this.#opts.indent; this.interval = this.#opts.interval; this.isSpinning = false; this.#frameIndex = 0; this.#linesToClear = 0; this.#linesCount = 1; this.#enabled = typeof opts.enabled === "boolean" ? 
opts.enabled : isInteractive(this.#stream); if (opts.hideCursor) { addEventListener("unload", ()=>{ showCursorSync(this.#stream); }); } if (opts.interceptConsole) { this.#interceptConsole(); } } #spinner = __default1.dots; #color = cyan; #text = ""; #prefix = ""; #interceptConsole() { const methods = [ "log", "debug", "info", "dir", "dirxml", "warn", "error", "assert", "count", "countReset", "table", "time", "timeLog", "timeEnd", "group", "groupCollapsed", "groupEnd", "clear", "trace", "profile", "profileEnd", "timeStamp" ]; for (const method of methods){ const original = console[method]; console[method] = (...args)=>{ if (this.isSpinning) { this.stop(); this.clear(); original(...args); this.start(); } else { original(...args); } }; } } set spinner(spin) { this.#frameIndex = 0; if (typeof spin === "string") this.#spinner = __default1[spin]; else this.#spinner = spin; } get spinner() { return this.#spinner; } set color(color) { if (typeof color === "string") this.#color = colormap[color]; else this.#color = color; } get color() { return this.#color; } set text(value) { this.#text = value; this.updateLines(); } get text() { return this.#text; } set prefix(value) { this.#prefix = value; this.updateLines(); } get prefix() { return this.#prefix; } #write(data) { this.#stream.writeSync(encoder1.encode(data)); } start() { if (!this.#enabled) { if (this.text) { this.#write(`- ${this.text}\n`); } return this; } if (this.isSpinning) return this; if (this.#opts.hideCursor) { hideCursorSync(this.#stream); } this.isSpinning = true; this.render(); this.#id = setInterval(this.render.bind(this), this.interval); return this; } render() { this.clear(); this.#write(`${this.frame()}\n`); this.updateLines(); this.#linesToClear = this.#linesCount; } frame() { const { frames } = this.#spinner; let frame = frames[this.#frameIndex]; frame = this.#color(frame); this.#frameIndex = ++this.#frameIndex % frames.length; const fullPrefixText = typeof this.prefix === "string" && this.prefix !== "" 
? this.prefix + " " : ""; const fullText = typeof this.text === "string" ? " " + this.text : ""; return fullPrefixText + frame + fullText; } clear() { if (!this.#enabled) return; for(let i = 0; i < this.#linesToClear; i++){ goUpSync(1, this.#stream); clearLineSync(this.#stream); goRightSync(this.indent - 1, this.#stream); } this.#linesToClear = 0; } updateLines() { let columns = 80; try { columns = Deno.consoleSize().columns ?? columns; } catch {} const fullPrefixText = typeof this.prefix === "string" ? this.prefix + "-" : ""; this.#linesCount = stripAnsi(fullPrefixText + "--" + this.text).split("\n").reduce((count, line)=>{ return count + Math.max(1, Math.ceil(wcswidth(line) / columns)); }, 0); } stop() { if (!this.#enabled) return; clearInterval(this.#id); this.#id = -1; this.#frameIndex = 0; this.clear(); this.isSpinning = false; if (this.#opts.hideCursor) { showCursorSync(this.#stream); } } stopAndPersist(options = {}) { const prefix = options.prefix || this.prefix; const fullPrefix = typeof prefix === "string" && prefix !== "" ? prefix + " " : ""; const text = options.text || this.text; const fullText = typeof text === "string" ? 
" " + text : ""; this.stop(); this.#write(`${fullPrefix}${options.symbol || " "}${fullText}\n`); } succeed(text) { return this.stopAndPersist({ symbol: symbols.success, text }); } fail(text) { return this.stopAndPersist({ symbol: symbols.error, text }); } warn(text) { return this.stopAndPersist({ symbol: symbols.warning, text }); } info(text) { return this.stopAndPersist({ symbol: symbols.info, text }); } } let current = null; function wait1(param) { if (typeof param === "string") { param = { text: param }; } param.interceptConsole = false; current = wait({ stream: Deno.stderr, ...param }); return current; } function interruptSpinner() { current?.stop(); const interrupt = new Interrupt(current); current = null; return interrupt; } class Interrupt { #spinner; constructor(spinner){ this.#spinner = spinner; } resume() { current = this.#spinner; this.#spinner?.start(); } } const USER_AGENT = `DeployCTL/${VERSION} (${Deno.build.os} ${Deno.osRelease()}; ${Deno.build.arch})`; class APIError extends Error { code; xDenoRay; name = "APIError"; constructor(code, message, xDenoRay){ super(message); this.code = code; this.xDenoRay = xDenoRay; } toString() { let error = `${this.name}: ${this.message}`; if (this.xDenoRay !== null) { error += `\nx-deno-ray: ${this.xDenoRay}`; error += "\nIf you encounter this error frequently," + " contact us at deploy@deno.com with the above x-deno-ray."; } return error; } } function endpoint() { return Deno.env.get("DEPLOY_API_ENDPOINT") ?? "https://dash.deno.com"; } class API { #endpoint; #authorization; #config; constructor(authorization, endpoint, config){ this.#authorization = authorization; this.#endpoint = endpoint; const DEFAULT_CONFIG = { alwaysPrintXDenoRay: false, logger: { debug: (m)=>console.debug(m), info: (m)=>console.info(m), notice: (m)=>console.log(m), warning: (m)=>console.warn(m), error: (m)=>console.error(m) } }; this.#config = DEFAULT_CONFIG; this.#config.alwaysPrintXDenoRay = config?.alwaysPrintXDenoRay ?? 
DEFAULT_CONFIG.alwaysPrintXDenoRay; this.#config.logger = config?.logger ?? DEFAULT_CONFIG.logger; } static fromToken(token) { return new API(`Bearer ${token}`, endpoint()); } static withTokenProvisioner(provisioner) { return new API(provisioner, endpoint()); } async request(path, opts = {}) { const url = `${this.#endpoint}/api${path}`; const method = opts.method ?? "GET"; const body = typeof opts.body === "string" || opts.body instanceof FormData ? opts.body : JSON.stringify(opts.body); const authorization = typeof this.#authorization === "string" ? this.#authorization : `Bearer ${await this.#authorization.get() ?? await this.#authorization.provision()}`; const sudo = Deno.env.get("SUDO"); const headers = { "User-Agent": USER_AGENT, "Accept": opts.accept ?? "application/json", "Authorization": authorization, ...opts.body !== undefined ? opts.body instanceof FormData ? {} : { "Content-Type": "application/json" } : {}, ...sudo ? { ["x-deploy-sudo"]: sudo } : {} }; let res = await fetch(url, { method, headers, body }); if (this.#config.alwaysPrintXDenoRay) { this.#config.logger.notice(`x-deno-ray: ${res.headers.get("x-deno-ray")}`); } if (res.status === 401 && typeof this.#authorization === "object") { headers.Authorization = `Bearer ${await this.#authorization.provision()}`; res = await fetch(url, { method, headers, body }); } return res; } async #requestJson(path, opts) { const res = await this.request(path, opts); if (res.headers.get("Content-Type") !== "application/json") { const text = await res.text(); throw new Error(`Expected JSON, got '${text}'`); } const json = await res.json(); if (res.status !== 200) { const xDenoRay = res.headers.get("x-deno-ray"); throw new APIError(json.code, json.message, xDenoRay); } return json; } async #requestStream(path, opts) { const res = await this.request(path, opts); if (res.status !== 200) { const json = await res.json(); const xDenoRay = res.headers.get("x-deno-ray"); throw new APIError(json.code, json.message, xDenoRay); 
} if (res.body === null) { throw new Error("Stream ended unexpectedly"); } const lines = res.body.pipeThrough(new TextDecoderStream()).pipeThrough(new TextLineStream()); return async function*() { for await (const line of lines){ if (line === "") return; yield line; } }(); } async #requestJsonStream(path, opts) { const stream = await this.#requestStream(path, opts); return async function*() { for await (const line of stream){ yield JSON.parse(line); } }(); } async getOrganizationByName(name) { const organizations = await this.#requestJson(`/organizations`); for (const org of organizations){ if (org.name === name) { return org; } } } async getOrganizationById(id) { return await this.#requestJson(`/organizations/${id}`); } async createOrganization(name) { const body = { name }; return await this.#requestJson(`/organizations`, { method: "POST", body }); } async listOrganizations() { return await this.#requestJson(`/organizations`); } async getProject(id) { try { return await this.#requestJson(`/projects/${id}`); } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return null; } throw err; } } async createProject(name, organizationId, envs) { const body = { name, organizationId, envs }; return await this.#requestJson(`/projects/`, { method: "POST", body }); } async renameProject(id, newName) { const body = { name: newName }; await this.#requestJson(`/projects/${id}`, { method: "PATCH", body }); } async deleteProject(id) { try { await this.#requestJson(`/projects/${id}`, { method: "DELETE" }); return true; } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return false; } throw err; } } async listProjects(orgId) { const org = await this.#requestJson(`/organizations/${orgId}`); return org.projects; } async getDomains(projectId) { return await this.#requestJson(`/projects/${projectId}/domains`); } async listDeployments(projectId, page, limit) { const query = new URLSearchParams(); if (page !== undefined) { 
query.set("page", page.toString()); } if (limit !== undefined) { query.set("limit", limit.toString()); } try { const [list, paging] = await this.#requestJson(`/projects/${projectId}/deployments?${query}`); return { list, paging }; } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return null; } throw err; } } async *listAllDeployments(projectId) { let totalPages = 1; let page = 0; while(totalPages > page){ const [deployments, paging] = await this.#requestJson(`/projects/${projectId}/deployments/?limit=50&page=${page}`); for (const deployment of deployments){ yield deployment; } totalPages = paging.totalPages; page = paging.page + 1; } } async getDeployment(deploymentId) { try { return await this.#requestJson(`/deployments/${deploymentId}`); } catch (err) { if (err instanceof APIError && err.code === "deploymentNotFound") { return null; } throw err; } } async deleteDeployment(deploymentId) { try { await this.#requestJson(`/v1/deployments/${deploymentId}`, { method: "DELETE" }); return true; } catch (err) { if (err instanceof APIError && err.code === "deploymentNotFound") { return false; } throw err; } } async redeployDeployment(deploymentId, redeployParams) { try { return await this.#requestJson(`/v1/deployments/${deploymentId}/redeploy?internal=true`, { method: "POST", body: redeployParams }); } catch (err) { if (err instanceof APIError && err.code === "deploymentNotFound") { return null; } throw err; } } getLogs(projectId, deploymentId) { return this.#requestJsonStream(`/projects/${projectId}/deployments/${deploymentId}/logs/`, { accept: "application/x-ndjson" }); } async queryLogs(projectId, deploymentId, params) { const searchParams = new URLSearchParams({ params: JSON.stringify(params) }); return await this.#requestJson(`/projects/${projectId}/deployments/${deploymentId}/query_logs?${searchParams.toString()}`); } async projectNegotiateAssets(id, manifest) { return await this.#requestJson(`/projects/${id}/assets/negotiate`, { 
method: "POST", body: manifest }); } pushDeploy(projectId, request, files) { const form = new FormData(); form.append("request", JSON.stringify(request)); for (const bytes of files){ form.append("file", new Blob([ bytes ])); } return this.#requestJsonStream(`/projects/${projectId}/deployment_with_assets`, { method: "POST", body: form }); } gitHubActionsDeploy(projectId, request, files) { const form = new FormData(); form.append("request", JSON.stringify(request)); for (const bytes of files){ form.append("file", new Blob([ bytes ])); } return this.#requestJsonStream(`/projects/${projectId}/deployment_github_actions`, { method: "POST", body: form }); } getMetadata() { return this.#requestJson("/meta"); } async streamMetering(project) { const streamGen = ()=>this.#requestStream(`/projects/${project}/stats`); let stream = await streamGen(); return async function*() { for(;;){ try { for await (const line of stream){ try { yield JSON.parse(line); } catch {} } } catch (error) { const interrupt = interruptSpinner(); const spinner = wait1(`Error: ${error}. 
Reconnecting...`).start(); await delay(5_000); stream = await streamGen(); spinner.stop(); interrupt.resume(); } } }(); } async getProjectDatabases(project) { try { return await this.#requestJson(`/projects/${project}/databases`); } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return null; } throw err; } } async getDeploymentCrons(projectId, deploymentId) { return await this.#requestJson(`/projects/${projectId}/deployments/${deploymentId}/crons`); } async getProjectCrons(projectId) { try { return await this.#requestJson(`/projects/${projectId}/deployments/latest/crons`); } catch (err) { if (err instanceof APIError && err.code === "deploymentNotFound") { return null; } throw err; } } } async function calculateGitSha1(bytes) { const prefix = `blob ${bytes.byteLength}\0`; const prefixBytes = new TextEncoder().encode(prefix); const fullBytes = new Uint8Array(prefixBytes.byteLength + bytes.byteLength); fullBytes.set(prefixBytes); fullBytes.set(bytes, prefixBytes.byteLength); const hashBytes = await crypto.subtle.digest("SHA-1", fullBytes); const hashHex = Array.from(new Uint8Array(hashBytes)).map((b)=>b.toString(16).padStart(2, "0")).join(""); return hashHex; } function include(path, include, exclude) { if (include.length && !include.some((pattern)=>pattern.test(normalize2(path)))) { return false; } if (exclude.length && exclude.some((pattern)=>pattern.test(normalize2(path)))) { return false; } return true; } async function walk(cwd, dir, options) { const hashPathMap = new Map(); const manifestEntries = await walkInner(cwd, dir, hashPathMap, options); return { manifestEntries, hashPathMap }; } async function walkInner(cwd, dir, hashPathMap, options) { const entries = {}; for await (const file of Deno.readDir(dir)){ const path = join2(dir, file.name); const relative = path.slice(cwd.length); if (!file.isDirectory && !include(path.slice(cwd.length + 1), options.include, options.exclude)) { continue; } let entry; if (file.isFile) { const 
data = await Deno.readFile(path); const gitSha1 = await calculateGitSha1(data); entry = { kind: "file", gitSha1, size: data.byteLength }; hashPathMap.set(gitSha1, path); } else if (file.isDirectory) { if (relative === "/.git") continue; entry = { kind: "directory", entries: await walkInner(cwd, path, hashPathMap, options) }; } else if (file.isSymlink) { const target = await Deno.readLink(path); entry = { kind: "symlink", target }; } else { throw new Error(`Unreachable`); } entries[file.name] = entry; } return entries; } function convertPatternToRegExp(pattern) { return isGlob(pattern) ? new RegExp(globToRegExp2(normalize2(pattern)).toString().slice(1, -2)) : new RegExp(`^${normalize2(pattern)}`); } export { parseEntrypoint as parseEntrypoint }; export { API as API, APIError as APIError }; export { convertPatternToRegExp as convertPatternToRegExp, walk as walk }; export { fromFileUrl2 as fromFileUrl, resolve2 as resolve }; ================================================ FILE: action/index.js ================================================ import * as core from "@actions/core"; import * as github from "@actions/github"; import "./shim.js"; import { API, APIError, convertPatternToRegExp, fromFileUrl, parseEntrypoint, resolve, walk, } from "./deps.js"; import process from "node:process"; // The origin of the server to make Deploy requests to. const ORIGIN = process.env.DEPLOY_API_ENDPOINT ?? 
"https://dash.deno.com"; async function main() { const projectId = core.getInput("project", { required: true }); const entrypoint = core.getInput("entrypoint", { required: true }); const importMap = core.getInput("import-map", {}); const include = core.getMultilineInput("include", {}); const exclude = core.getMultilineInput("exclude", {}); const cwd = resolve(process.cwd(), core.getInput("root", {})); if (github.context.eventName === "pull_request") { const pr = github.context.payload.pull_request; const isPRFromFork = pr.head.repo.id !== pr.base.repo.id; if (isPRFromFork) { core.setOutput("deployment-id", ""); core.setOutput("url", ""); core.notice( "Deployments from forks are currently not supported by Deno Deploy. The deployment was skipped.", { title: "Skipped deployment on fork", }, ); return; } } const aud = new URL(`/projects/${projectId}`, ORIGIN); let token; try { token = await core.getIDToken(aud); } catch { throw "Failed to get the GitHub OIDC token. Make sure that this job has the required permissions for getting GitHub OIDC tokens (see https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings )."; } core.info(`Project: ${projectId}`); let url = await parseEntrypoint(entrypoint, cwd); if (url.protocol === "file:") { const path = fromFileUrl(url); if (!path.startsWith(cwd)) { throw "Entrypoint must be in the working directory (cwd, or specified root directory)."; } const entrypoint = path.slice(cwd.length); url = new URL(`file:///src${entrypoint}`); } core.info(`Entrypoint: ${url.href}`); let importMapUrl = null; if (importMap) { importMapUrl = await parseEntrypoint(importMap, cwd, "import map"); if (importMapUrl.protocol === "file:") { const path = fromFileUrl(importMapUrl); if (!path.startsWith(cwd)) { throw "Import map must be in the working directory (cwd, or specified root directory)."; } const importMap = path.slice(cwd.length); importMapUrl = new 
URL(`file:///src${importMap}`); } core.info(`Import map: ${importMapUrl.href}`); } core.debug(`Discovering assets in "${cwd}"`); const includes = include.flatMap((i) => i.split(",")).map((i) => i.trim()); const excludes = exclude.flatMap((e) => e.split(",")).map((i) => i.trim()); // Exclude node_modules by default unless explicitly specified if (!includes.some((i) => i.includes("node_modules"))) { excludes.push("**/node_modules"); } const { manifestEntries: entries, hashPathMap: assets } = await walk( cwd, cwd, { include: includes.map(convertPatternToRegExp), exclude: excludes.map(convertPatternToRegExp), }, ); core.debug(`Discovered ${assets.size} assets`); const api = new API(`GitHubOIDC ${token}`, ORIGIN, { alwaysPrintDenoRay: true, logger: core, }); const neededHashes = await api.projectNegotiateAssets(projectId, { entries, }); core.debug(`Determined ${neededHashes.length} need to be uploaded`); const files = []; for (const hash of neededHashes) { const path = assets.get(hash); if (path === undefined) { throw `Asset ${hash} not found.`; } const data = await Deno.readFile(path); files.push(data); } const totalSize = files.reduce((acc, file) => acc + file.length, 0); core.info( `Uploading ${neededHashes.length} file(s) (total ${totalSize} bytes)`, ); const manifest = { entries }; core.debug(`Manifest: ${JSON.stringify(manifest, null, 2)}`); const req = { url: url.href, importMapUrl: importMapUrl?.href ?? null, manifest, event: github.context.payload, }; const progress = await api.gitHubActionsDeploy(projectId, req, files); let deployment; for await (const event of progress) { switch (event.type) { case "staticFile": { const percentage = (event.currentBytes / event.totalBytes) * 100; core.info( `Uploading ${files.length} asset(s) (${percentage.toFixed(1)}%)`, ); break; } case "load": { const progress = event.seen / event.total * 100; core.info(`Deploying... 
(${progress.toFixed(1)}%)`); break; } case "uploadComplete": core.info("Finishing deployment..."); break; case "success": core.info("Deployment complete."); core.info("\nView at:"); for (const { domain } of event.domainMappings) { core.info(` - https://${domain}`); } deployment = event; break; case "error": throw event.ctx; } } core.setOutput("deployment-id", deployment.id); const domain = deployment.domainMappings[0].domain; core.setOutput("url", `https://${domain}/`); } try { await main(); } catch (error) { if (error instanceof APIError) { core.setFailed(error.toString()); } else { core.setFailed(error); } } ================================================ FILE: action/package.json ================================================ { "type": "module", "dependencies": { "@actions/core": "^1.10.0", "@actions/github": "^5.1.1", "@deno/shim-deno": "^0.19.2", "formdata-polyfill": "^4.0.10", "undici": "^5.11.0" } } ================================================ FILE: action/shim.js ================================================ import { fetch as realfetch } from "undici"; import { Deno } from "@deno/shim-deno"; import { TransformStream } from "stream/web"; import { FormData, formDataToBlob } from "formdata-polyfill/esm.min.js"; import Blob from "fetch-blob"; function fetch(url, init) { if (init.body instanceof FormData) { init.body = formDataToBlob(init.body, Blob); } return realfetch(url, init); } globalThis.fetch = fetch; globalThis.Deno = Deno; globalThis.TransformStream = TransformStream; globalThis.FormData = FormData; globalThis.Blob = Blob; ================================================ FILE: action/tests/README.md ================================================ These test modules are deployed by the [test GHA](../../.github/workflows/test.yml). Assertions are performed as deployment errors. 
================================================
FILE: action/tests/always_exclude_node_modules/main.ts
================================================
// Asserts at deploy time that `specifier` was NOT uploaded: resolving it
// must fail with NotFound. Any other outcome fails the deployment.
async function ensureAbsent(specifier: string) {
  try {
    await Deno.lstat(new URL(import.meta.resolve(specifier)));
  } catch (e) {
    if (e instanceof Deno.errors.NotFound) {
      return;
    }
    throw e;
  }
  throw new Error("BOOM!");
}

// node_modules must be excluded from the upload by default, even when the
// `exclude` input does not mention it.
await ensureAbsent("./node_modules/import_bomb1");
await ensureAbsent("./node_modules/import_bomb2");

Deno.serve(() => new Response("Hello World"));

================================================
FILE: action/tests/hello.ts
================================================
import { serve } from "std/http/server.ts";

// Responds to every request with this module's own source code.
const handler = async (_req: Request): Promise<Response> => {
  const source = await Deno.readTextFile(new URL(import.meta.url));
  return new Response(source, {
    headers: { "content-type": "text/plain; charset=utf8" },
  });
};

console.log("Listening on http://localhost:8000");
serve(handler);

================================================
FILE: action/tests/import_bomb1
================================================

================================================
FILE: action/tests/import_bomb2
================================================

================================================
FILE: action/tests/import_map.json
================================================
{
  "imports": {
    "std/": "https://deno.land/std@0.128.0/"
  }
}

================================================
FILE: action/tests/include_exclude.ts
================================================
// Asserts at deploy time that `specifier` was NOT uploaded: resolving it
// must fail with NotFound. Any other outcome fails the deployment.
async function ensureAbsent(specifier: string) {
  try {
    await Deno.lstat(new URL(import.meta.resolve(specifier)));
  } catch (e) {
    if (e instanceof Deno.errors.NotFound) {
      return;
    }
    throw e;
  }
  throw new Error("BOOM!");
}

// Files excluded via the include/exclude inputs must not be present.
await ensureAbsent("./import_bomb1");
await ensureAbsent("./import_bomb2");

Deno.serve(() => new Response("Hello World"));
================================================ FILE: action.yml ================================================ name: Deploy to Deno Deploy description: Deploy your applications to Deno Deploy, right from GitHub Actions author: Deno Land Inc branding: color: gray-dark icon: globe inputs: project: description: The name or ID of the project to deploy required: true entrypoint: description: The path or URL to the entrypoint file required: true import-map: description: The path or URL to an import map file required: false include: description: Only upload files that match this pattern (multiline and/or comma-separated) required: false exclude: description: Exclude files that match this pattern (multiline and/or comma-separated) required: false root: description: The path to the directory containing the code and assets to upload required: false outputs: deployment-id: description: The ID of the created deployment url: description: The URL where the deployment is reachable runs: using: node20 main: action/index.js ================================================ FILE: deno.jsonc ================================================ { "name": "@deno/deployctl", "version": "1.13.1", "exports": "./deployctl.ts", "fmt": { "exclude": ["action/node_modules/"] }, "lint": { "exclude": ["action/node_modules/"] }, "tasks": { "test": "deno test -A --unstable tests/ src/", "build-action": "deno run --allow-read --allow-write --allow-net=jsr.io:443 --allow-env ./tools/bundle.ts ./src/utils/mod.ts > ./action/deps.js", "version-match": "deno run --allow-read --allow-env ./tools/version_match.ts" }, "imports": { "@std/fmt": "jsr:@std/fmt@0.217", "@std/fmt/colors": "jsr:@std/fmt@0.217/colors", "@std/path": "jsr:@std/path@0.217", "@std/flags": "jsr:@std/flags@0.217", "@std/streams": "jsr:@std/streams@0.217", "@std/streams/text_line_stream": "jsr:@std/streams@0.217/text_line_stream", "@std/jsonc": "jsr:@std/jsonc@0.217", "@std/encoding": "jsr:@std/encoding@0.217", "@std/async": 
"jsr:@std/async@0.217", "@std/async/delay": "jsr:@std/async@0.217/delay", "@std/dotenv": "jsr:@std/dotenv@0.217", "@std/semver": "jsr:@std/semver@0.217", "@std/assert": "jsr:@std/assert@0.217", "@denosaurs/wait": "jsr:@denosaurs/wait@0.2.2", "@denosaurs/tty": "jsr:@denosaurs/tty@0.2.1", "@deno/emit": "jsr:@deno/emit@0.46.0" } } ================================================ FILE: deployctl.ts ================================================ #!/usr/bin/env -S deno run --allow-read --allow-write --allow-env --allow-net --allow-run --allow-sys --quiet // Copyright 2021 Deno Land Inc. All rights reserved. MIT license. import { greaterOrEqual as semverGreaterThanOrEquals, parse as semverParse, } from "@std/semver"; import { setColorEnabled } from "@std/fmt/colors"; import { type Args, parseArgs } from "./src/args.ts"; import { error } from "./src/error.ts"; import deploySubcommand from "./src/subcommands/deploy.ts"; import upgradeSubcommand from "./src/subcommands/upgrade.ts"; import logsSubcommand from "./src/subcommands/logs.ts"; import topSubcommand from "./src/subcommands/top.ts"; import projectsSubcommand from "./src/subcommands/projects.ts"; import deploymentsSubcommand from "./src/subcommands/deployments.ts"; import apiSubcommand from "./src/subcommands/api.ts"; import { MINIMUM_DENO_VERSION, VERSION } from "./src/version.ts"; import { fetchReleases, getConfigPaths } from "./src/utils/info.ts"; import configFile from "./src/config_file.ts"; import inferConfig from "./src/config_inference.ts"; import { wait } from "./src/utils/spinner.ts"; const help = `deployctl ${VERSION} Command line tool for Deno Deploy. 
SUBCOMMANDS: deploy Deploy a script with static files to Deno Deploy projects Manage projects deployments Manage deployments logs View logs for the given project top Monitor projects resource usage in real time upgrade Upgrade deployctl to the given version (defaults to latest) api Perform raw HTTP requests against the Deploy API For more detailed help on each subcommand, use: deployctl -h `; if ( !semverGreaterThanOrEquals( semverParse(Deno.version.deno), semverParse(MINIMUM_DENO_VERSION), ) ) { error( `The Deno version you are using is too old. Please update to Deno ${MINIMUM_DENO_VERSION} or later. To do this run \`deno upgrade\`.`, ); } const args = parseArgs(Deno.args); setColoring(args); if (Deno.stdin.isTerminal()) { let latestVersion; // Get the path to the update information json file. const { updatePath } = getConfigPaths(); // Try to read the json file. const updateInfoJson = await Deno.readTextFile(updatePath).catch((error) => { if (error.name == "NotFound") return null; console.error(error); }); if (updateInfoJson) { const updateInfo = JSON.parse(updateInfoJson) as { lastFetched: number; latest: number; }; const moreThanADay = Math.abs(Date.now() - updateInfo.lastFetched) > 24 * 60 * 60 * 1000; // Fetch the latest release if it has been more than a day since the last // time the information about new version is fetched. if (moreThanADay) { fetchReleases(); } else { latestVersion = updateInfo.latest; } } else { fetchReleases(); } // If latestVersion is set we need to inform the user about a new release. 
if ( latestVersion && !(semverGreaterThanOrEquals( semverParse(VERSION), semverParse(latestVersion.toString()), )) ) { console.error( [ `A new release of deployctl is available: ${VERSION} -> ${latestVersion}`, "To upgrade, run `deployctl upgrade`", `https://github.com/denoland/deployctl/releases/tag/${latestVersion}\n`, ].join("\n"), ); } } const subcommand = args._.shift(); switch (subcommand) { case "deploy": await setDefaultsFromConfigFile(args); await inferConfig(args); await deploySubcommand(args); break; case "upgrade": await setDefaultsFromConfigFile(args); await upgradeSubcommand(args); break; case "logs": await setDefaultsFromConfigFile(args); await logsSubcommand(args); break; case "top": await setDefaultsFromConfigFile(args); await topSubcommand(args); break; case "projects": await setDefaultsFromConfigFile(args); await projectsSubcommand(args); break; case "deployments": await setDefaultsFromConfigFile(args); await deploymentsSubcommand(args); break; case "api": await apiSubcommand(args); break; default: if (args.version) { console.log(`deployctl ${VERSION}`); Deno.exit(0); } if (args.help) { console.log(help); Deno.exit(0); } console.error(help); Deno.exit(1); } async function setDefaultsFromConfigFile(args: Args) { const loadFileConfig = !args.version && !args.help; if (loadFileConfig) { const config = await configFile.read( args.config ?? configFile.cwdOrAncestors(), ); if (config === null && args.config !== undefined && !args["save-config"]) { error( `Could not find or read the config file '${args.config}'. 
Use --save-config to create it.`, ); } if (config !== null) { wait("").start().info(`Using config file '${config.path()}'`); config.useAsDefaultFor(args); // Set the effective config path for the rest of the execution args.config = config.path(); } } } function setColoring(args: Args) { switch (args.color) { case "auto": setAutoColoring(); break; case "always": setColorEnabled(true); break; case "never": setColorEnabled(false); break; default: wait("").start().warn( `'${args.color}' value for the --color option is not valid. Valid values are 'auto', 'always' and 'never'. Defaulting to 'auto'`, ); setAutoColoring(); } } function setAutoColoring() { if (Deno.stdout.isTerminal()) { setColorEnabled(true); } else { setColorEnabled(false); } } ================================================ FILE: examples/README.md ================================================ # Examples - [Hello-World](./hello-world/) - [Link Shortener](./link-shortener/) - [Fresh Hello-World](./fresh/) Make sure to visit the [Deno Deploy docs](https://docs.deno.com/deploy/tutorials) which has an extensive section of tutorials about how to build different use cases. ================================================ FILE: examples/fresh/README.md ================================================ # Fresh project Your new Fresh project is ready to go. You can follow the Fresh "Getting Started" guide here: https://fresh.deno.dev/docs/getting-started ### Usage Make sure to install Deno: https://deno.land/manual/getting_started/installation Then start the project: ``` deno task start ``` This will watch the project directory and restart as necessary. ================================================ FILE: examples/fresh/components/Button.tsx ================================================ import type { JSX } from "preact"; import { IS_BROWSER } from "$fresh/runtime.ts"; export function Button(props: JSX.HTMLAttributes) { return (

{props.count}

); } ================================================ FILE: examples/fresh/main.ts ================================================ /// /// /// /// /// import "$std/dotenv/load.ts"; import { start } from "$fresh/server.ts"; import manifest from "./fresh.gen.ts"; import config from "./fresh.config.ts"; await start(manifest, config); ================================================ FILE: examples/fresh/routes/_404.tsx ================================================ import { Head } from "$fresh/runtime.ts"; export default function Error404() { return ( <> 404 - Page not found
the Fresh logo: a sliced lemon dripping with juice

404 - Page not found

The page you were looking for doesn't exist.

Go back home
); } ================================================ FILE: examples/fresh/routes/_app.tsx ================================================ import type { AppProps } from "$fresh/server.ts"; export default function App({ Component }: AppProps) { return ( fresh-site ); } ================================================ FILE: examples/fresh/routes/api/joke.ts ================================================ import type { HandlerContext } from "$fresh/server.ts"; // Jokes courtesy of https://punsandoneliners.com/randomness/programmer-jokes/ const JOKES = [ "Why do Java developers often wear glasses? They can't C#.", "A SQL query walks into a bar, goes up to two tables and says “can I join you?”", "Wasn't hard to crack Forrest Gump's password. 1forrest1.", "I love pressing the F5 key. It's refreshing.", "Called IT support and a chap from Australia came to fix my network connection. I asked “Do you come from a LAN down under?”", "There are 10 types of people in the world. Those who understand binary and those who don't.", "Why are assembly programmers often wet? They work below C level.", "My favourite computer based band is the Black IPs.", "What programme do you use to predict the music tastes of former US presidential candidates? An Al Gore Rhythm.", "An SEO expert walked into a bar, pub, inn, tavern, hostelry, public house.", ]; export const handler = (_req: Request, _ctx: HandlerContext): Response => { const randomIndex = Math.floor(Math.random() * JOKES.length); const body = JOKES[randomIndex]; return new Response(body); }; ================================================ FILE: examples/fresh/routes/greet/[name].tsx ================================================ import type { PageProps } from "$fresh/server.ts"; export default function Greet(props: PageProps) { return
Hello {props.params.name}
; } ================================================ FILE: examples/fresh/routes/index.tsx ================================================ import { useSignal } from "@preact/signals"; import Counter from "../islands/Counter.tsx"; export default function Home() { const count = useSignal(3); return (
the Fresh logo: a sliced lemon dripping with juice

Welcome to Fresh

Try updating this message in the ./routes/index.tsx file, and refresh.

); } ================================================ FILE: examples/fresh/twind.config.ts ================================================ import type { Options } from "$fresh/plugins/twind.ts"; export default { selfURL: import.meta.url, } as Options; ================================================ FILE: examples/hello-world/deno.json ================================================ { "deploy": { "exclude": [], "include": [], "entrypoint": "main.ts" } } ================================================ FILE: examples/hello-world/main.ts ================================================ Deno.serve((_req) => new Response("Hello World")); ================================================ FILE: examples/link-shortener/deno.json ================================================ { "deploy": { "exclude": [], "include": [], "entrypoint": "main.ts" } } ================================================ FILE: examples/link-shortener/main.ts ================================================ const kv = await Deno.openKv(); Deno.serve(async (request: Request) => { // Create short links if (request.method == "POST") { const body = await request.text(); const { slug, url } = JSON.parse(body); const result = await kv.set(["links", slug], url); return new Response(JSON.stringify(result)); } // Redirect short links const slug = request.url.split("/").pop() || ""; const url = (await kv.get(["links", slug])).value as string; if (url) { return Response.redirect(url, 301); } else { const m = !slug ? "Please provide a slug." : `Slug "${slug}" not found`; return new Response(m, { status: 404 }); } }); ================================================ FILE: src/args.ts ================================================ // Copyright 2021 Deno Land Inc. All rights reserved. MIT license. 
import { parse } from "@std/flags"; export function parseArgs(args: string[]) { const parsed = parse(args, { alias: { "help": "h", "version": "V", "project": "p", }, boolean: [ "help", "prod", "last", "static", "version", "dry-run", "save-config", "force", ], string: [ "project", "token", "include", "exclude", "import-map", "deployment", "since", "until", "grep", "levels", "regions", "limit", "page", "config", "entrypoint", "org", "format", "color", "region", "id", "prev", "next", "method", "body", "db", "env", "env-file", ], collect: [ "grep", "include", "exclude", "region", "prev", "next", "env", "env-file", ], default: { static: true, config: Deno.env.get("DEPLOYCTL_CONFIG_FILE"), token: Deno.env.get("DENO_DEPLOY_TOKEN"), org: Deno.env.get("DEPLOYCTL_ORGANIZATION"), color: "auto", }, }); return parsed; } export type Args = ReturnType; ================================================ FILE: src/config_file.ts ================================================ // Copyright 2021 Deno Land Inc. All rights reserved. MIT license. import { cyan, green, magenta, red } from "@std/fmt/colors"; import * as JSONC from "@std/jsonc"; import { dirname, extname, join, relative, resolve } from "@std/path"; import { error } from "./error.ts"; import { isURL } from "./utils/entrypoint.ts"; import { wait } from "./utils/spinner.ts"; const DEFAULT_FILENAME = "deno.json"; const CANDIDATE_FILENAMES = [DEFAULT_FILENAME, "deno.jsonc"]; /** Arguments persisted in the deno.json config file */ interface ConfigArgs { project?: string; entrypoint?: string; include?: string[]; exclude?: string[]; } class ConfigFile { #path: string; #content: { deploy?: ConfigArgs }; constructor(path: string, content: { deploy?: ConfigArgs }) { this.#path = path; this.#content = { ...content, deploy: content.deploy && this.normalize(content.deploy), }; } /** * Create a new `ConfigFile` using an object that _at least_ contains the `ConfigArgs`. * * Ignores any property in `args` not meant to be persisted. 
*/ static create(path: string, args: ConfigArgs) { const config = new ConfigFile(path, { deploy: {} }); // Use override to clean-up args config.override(args); return config; } /** * Override the `ConfigArgs` of this ConfigFile. * * Ignores any property in `args` not meant to be persisted. */ override(args: ConfigArgs) { const normalizedArgs = this.normalize(args); this.#content.deploy = normalizedArgs; } /** * For every arg in `ConfigArgs`, if the `args` argument object does not contain * the arg, fill it with the value in this `ConfigFile`, if any. */ useAsDefaultFor(args: ConfigArgs) { for (const [key, thisValue] of Object.entries(this.args())) { // deno-lint-ignore no-explicit-any const argValue = (args as any)[key]; if ( (argValue === undefined || Array.isArray(argValue) && argValue.length === 0) && thisValue ) { // deno-lint-ignore no-explicit-any (args as any)[key] = thisValue; } } } /** Returns all the differences between this `ConfigArgs` and the one provided as argument. * * The comparison is performed against the JSON output of each config. The "other" args are * sematically considered additions in the return value. Ignores any property in `args` not meant * to be persisted. */ diff(args: ConfigArgs): Change[] { const changes = []; const otherConfigOutput = JSON.parse(ConfigFile.create(this.path(), args).toFileContent()).deploy ?? {}; const thisConfigOutput = JSON.parse(this.toFileContent()).deploy ?? 
{}; // Iterate over the other args as they might include args not yet persisted in the config file for (const [key, otherValue] of Object.entries(otherConfigOutput)) { const thisValue = thisConfigOutput[key]; if (Array.isArray(otherValue) && Array.isArray(thisValue)) { if ( thisValue.length !== otherValue.length || !thisValue.every((x, i) => otherValue[i] === x) ) { changes.push({ key, removal: thisValue, addition: otherValue }); } } else if (thisValue !== otherValue) { changes.push({ key, removal: thisValue, addition: otherValue }); } } return changes; } normalize(args: ConfigArgs): ConfigArgs { // Copy object as normalization is internal to the config file const normalizedArgs = { project: args.project, exclude: args.exclude, include: args.include, entrypoint: (args.entrypoint && !isURL(args.entrypoint)) ? resolve(args.entrypoint) // Backoff if entrypoint is URL, the user knows what they're doing : args.entrypoint, }; return normalizedArgs; } /** Return whether the `ConfigFile` has the `deploy` namespace */ hasDeployConfig() { return this.#content.deploy !== undefined; } static fromFileContent(filepath: string, content: string) { const parsedContent = JSONC.parse(content) as { deploy?: ConfigArgs }; const configContent = { ...parsedContent, deploy: parsedContent.deploy && { ...parsedContent.deploy, entrypoint: parsedContent.deploy.entrypoint && (isURL(parsedContent.deploy.entrypoint) // Backoff if entrypoint is URL, the user knows what they're doing ? parsedContent.deploy.entrypoint // entrypoint must be interpreted as absolute or relative to the config file : resolve(dirname(filepath), parsedContent.deploy.entrypoint)), }, }; return new ConfigFile(filepath, configContent); } toFileContent() { const content = { ...this.#content, deploy: this.#content.deploy && { ...this.#content.deploy, entrypoint: this.#content.deploy.entrypoint && (isURL(this.#content.deploy.entrypoint) // Backoff if entrypoint is URL, the user knows what they're doing ? 
this.#content.deploy.entrypoint // entrypoint must be stored relative to the config file : relative(dirname(this.#path), this.#content.deploy.entrypoint)), }, }; return JSON.stringify(content, null, 2); } path() { return this.#path; } args() { return (this.#content.deploy ?? {}); } } export default { /** Read a `ConfigFile` from disk */ async read( path: string | Iterable, ): Promise { const paths = typeof path === "string" ? [path] : path; for (const filepath of paths) { let content; try { content = await Deno.readTextFile(filepath); } catch { // File not found, try next continue; } try { return ConfigFile.fromFileContent(filepath, content); } catch (e) { error(e); } } // config file not found return null; }, /** * Write `ConfigArgs` to the config file. * * @param path {string | null} path where to write the config file. If the file already exists and * `override` is `true`, its content will be merged with the `args` * argument. If null, will default to `DEFAULT_FILENAME`. * @param args {ConfigArgs} args to be upserted into the config file. * @param overwrite {boolean} control whether an existing config file should be overwritten. */ maybeWrite: async function ( path: string | null, args: ConfigArgs, overwrite: boolean, ): Promise { const pathOrDefault = path ?? DEFAULT_FILENAME; const isJsonc = extname(pathOrDefault) === ".jsonc"; const existingConfig = await this.read(pathOrDefault); const changes = existingConfig?.diff(args) ?? []; let config; if (existingConfig && changes.length === 0) { // There are no changes to write return; } else if ( existingConfig && existingConfig.hasDeployConfig() && !overwrite ) { // There are changes to write and there's already some deploy config, we require the --save-config flag wait("").start().info( `Some of the config used differ from the config found in '${existingConfig.path()}'. 
Use --save-config to overwrite it.`, ); return; } else if (existingConfig) { // Either there is no deploy config in the config file or the user is using --save-config flag if (isJsonc) { const msg = overwrite ? `Writing to the config file '${pathOrDefault}' will remove any existing comment and format it as a plain JSON file. Is that ok?` : `I want to store some configuration in '${pathOrDefault}' config file but this will remove any existing comment from it. Is that ok?`; const confirmation = confirm(`${magenta("?")} ${msg}`); if (!confirmation) { const formattedChanges = existingConfig.hasDeployConfig() ? cyan( ` "deploy": {\n ...\n${formatChanges(changes, 2, 2)}\n }`, ) : green( ConfigFile.create(pathOrDefault, args).toFileContent().slice( 2, -2, ), ); wait({ text: "", indent: 3 }).start().info( `I understand. Here's the config I wanted to write:\n${formattedChanges}`, ); return; } } existingConfig.override(args); config = existingConfig; } else { // The config file does not exist. Create a new one. config = ConfigFile.create(pathOrDefault, args); } await Deno.writeTextFile( config.path(), (config satisfies ConfigFile).toFileContent(), ); wait("").start().succeed( `${ existingConfig ? "Updated" : "Created" } config file '${config.path()}'.`, ); }, cwdOrAncestors: function* () { let wd = Deno.cwd(); while (wd) { for (const filename of CANDIDATE_FILENAMES) { yield join(wd, filename); } const newWd = dirname(wd); if (newWd === wd) { return; } else { wd = newWd; } } }, }; function formatChanges( changes: Change[], indent?: number, gap?: number, ): string { const removals = []; const additions = []; const padding = " ".repeat(indent ?? 0); const innerPadding = " ".repeat(gap ?? 
0);
  for (const { key, removal, addition } of changes) {
    if (removal !== undefined) {
      removals.push(red(
        `${padding}-${innerPadding}"${key}": ${JSON.stringify(removal)}`,
      ));
    }
    if (addition !== undefined) {
      additions.push(green(
        `${padding}+${innerPadding}"${key}": ${JSON.stringify(addition)}`,
      ));
    }
  }
  // Removals first, then additions, comma-joined within each group
  return [removals.join(red(",\n")), additions.join(green(",\n"))].join("\n")
    .trim();
}

// One changed key in the deploy config: `removal` is the old value (if any),
// `addition` the new value (if any).
interface Change {
  key: string;
  removal?: unknown;
  addition?: unknown;
}

================================================
FILE: src/config_inference.ts
================================================

// Copyright 2021 Deno Land Inc. All rights reserved. MIT license.

import { magenta } from "@std/fmt/colors";
import { basename } from "@std/path/basename";
import { API, APIError, endpoint } from "./utils/api.ts";
import TokenProvisioner from "./utils/access_token.ts";
import { wait } from "./utils/spinner.ts";
import { error } from "./error.ts";
import organization from "./utils/organization.ts";

// Directory names too generic to be useful as a project name
const NONAMES = ["src", "lib", "code", "dist", "build", "shared", "public"];

/** Arguments inferred from context */
interface InferredArgs {
  project?: string;
  entrypoint?: string;
  exclude: string[];
  include: string[];
}

/**
 * Infer name of the project.
 *
 * The name of the project is inferred from either of the following options, in order:
 * - If the project is in a git repo, infer `<org>-<repo>`
 * - Otherwise, use the directory name from where DeployCTL is being executed,
 *   unless the name is useless like "src" or "dist".
 */
async function inferProject(api: API, dryRun: boolean, orgName?: string) {
  wait("").start().warn(
    "No project name or ID provided with either the --project arg or a config file.",
  );
  let projectName = await inferProjectFromOriginUrl() || inferProjectFromCWD();
  if (!projectName) {
    return;
  }
  if (dryRun) {
    // Dry run: report the guess without creating anything remotely
    wait("").start().succeed(
      `Guessed project name '${projectName}'.`,
    );
    wait({ text: "", indent: 3 }).start().info(
      "This is a dry run. In a live run the guessed name might be different if this one is invalid or already used.",
    );
    return projectName;
  }
  const org = orgName
    ? await organization.getByNameOrCreate(api, orgName)
    : null;
  // Retry loop: keep attempting project creation until it succeeds, the user
  // opts to reuse an owned project, or an unexpected error aborts the CLI.
  for (;;) {
    let spinner;
    if (projectName) {
      spinner = wait(
        `Guessing project name '${projectName}': creating project...`,
      ).start();
    } else {
      // projectName was reset to undefined below: let the API pick a name
      spinner = wait("Creating new project with a random name...").start();
    }
    try {
      const project = await api.createProject(projectName, org?.id);
      if (projectName) {
        spinner.succeed(
          `Guessed project name '${project.name}'.`,
        );
      } else {
        spinner.succeed(`Created new project '${project.name}'`);
      }
      wait({ text: "", indent: 3 }).start().info(
        `You can always change the project name with 'deployctl projects rename new-name' or in ${endpoint()}/projects/${project.name}/settings`,
      );
      return project.name;
    } catch (e) {
      if (e instanceof APIError && e.code == "projectNameInUse") {
        spinner.stop();
        spinner = wait(
          `Guessing project name '${projectName}': this project name is already used. Checking ownership...`,
        ).start();
        // If getProject succeeds, the current user owns the clashing project
        const hasAccess = projectName &&
          (await api.getProject(projectName)) !== null;
        if (hasAccess) {
          spinner.stop();
          const confirmation = confirm(
            `${
              magenta("?")
            } Guessing project name '${projectName}': you already own this project.
Should I deploy to it?`, ); if (confirmation) { return projectName; } } projectName = `${projectName}-${Math.floor(Math.random() * 100)}`; spinner.stop(); } else if (e instanceof APIError && e.code == "slugInvalid") { // Fallback to random name given by the API projectName = undefined; spinner.stop(); } else { spinner.fail( `Guessing project name '${projectName}': Creating project...`, ); error(e); } } } } async function inferProjectFromOriginUrl() { let originUrl = await getOriginUrlUsingGitCmd(); if (!originUrl) { originUrl = await getOriginUrlUsingFS(); } if (!originUrl) { return; } const result = originUrl.match( /[:\/]+(?[^\/]+)\/(?[^\/]+?)(?:\.git)?$/, )?.groups; if (result) { return `${result.org}-${result.repo}`; } } function inferProjectFromCWD() { const projectName = basename(Deno.cwd()) .toLowerCase() .replaceAll(/[\s_]/g, "-") .replaceAll(/[^a-z,A-Z,-]/g, "") .slice(0, 26); if (NONAMES.every((n) => n !== projectName)) { return projectName; } } /** Try getting the origin remote URL using the git command */ async function getOriginUrlUsingGitCmd(): Promise { try { const cmd = await new Deno.Command("git", { args: ["remote", "get-url", "origin"], }).output(); if (cmd.stdout.length !== 0) { return new TextDecoder().decode(cmd.stdout).trim(); } } catch (_) { return; } } /** Try getting the origin remote URL reading the .git/config file */ async function getOriginUrlUsingFS(): Promise { // We assume cwd is the root of the repo. 
We favor false-negatives over false-positives, and this // is a last-resort fallback anyway try { const config: string = await Deno.readTextFile(".git/config"); const originSectionStart = config.indexOf('[remote "origin"]'); const originSectionEnd = config.indexOf("[", originSectionStart + 1); return config.slice(originSectionStart, originSectionEnd).match( /url\s*=\s*(?.+)/, ) ?.groups ?.url ?.trim(); } catch { return; } } const ENTRYPOINT_PATHS = ["main", "index", "src/main", "src/index"]; const ENTRYPOINT_EXTENSIONS = ["ts", "js", "tsx", "jsx"]; /** * Infer the entrypoint of the project * * The current algorithm infers the entrypoint if one and only one of the following * files is found: * - main.[tsx|ts|jsx|js] * - index.[tsx|ts|jsx|js] * - src/main.[tsx|ts|jsx|js] * - src/index.[tsx|ts|jsx|js] */ async function inferEntrypoint() { const candidates = []; for (const path of ENTRYPOINT_PATHS) { for (const extension of ENTRYPOINT_EXTENSIONS) { candidates.push(present(`${path}.${extension}`)); } } const candidatesPresent = (await Promise.all(candidates)).filter((c) => c !== undefined ); if (candidatesPresent.length === 1) { return candidatesPresent[0]; } else { return; } } async function present(path: string): Promise { try { await Deno.lstat(path); return path; } catch { return; } } export default async function inferConfig( args: InferredArgs & { token?: string; help?: boolean; version?: boolean; "dry-run"?: boolean; org?: string; }, ) { if (args.help || args.version) { return; } const api = args.token ? API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); if (args.project === undefined) { args.project = await inferProject(api, !!args["dry-run"], args.org); } if (args.entrypoint === undefined) { args.entrypoint = await inferEntrypoint(); if (args.entrypoint) { wait("").start().warn( `No entrypoint provided with either the --entrypoint arg or a config file. 
I've guessed '${args.entrypoint}' for you.`, ); wait({ text: "", indent: 3 }).start().info( "Is this wrong? Please let us know in https://github.com/denoland/deployctl/issues/new", ); } } if (!args.include.some((i) => i.includes("node_modules"))) { args.exclude.push("**/node_modules"); } } ================================================ FILE: src/error.ts ================================================ // Copyright 2024 Deno Land Inc. All rights reserved. MIT license. import { bold, red } from "@std/fmt/colors"; export function error(err: unknown): never { const message = stringify(err); console.error(red(`${bold("error")}: ${message}`)); Deno.exit(1); } export type StringifyOptions = { verbose: boolean; }; const DEFAULT_STRINGIFY_OPTIONS: StringifyOptions = { verbose: false, }; export function stringify( err: unknown, options?: Partial, ): string { const opts = options === undefined ? DEFAULT_STRINGIFY_OPTIONS : { ...DEFAULT_STRINGIFY_OPTIONS, ...options }; if (err instanceof Error) { if (opts.verbose) { return stringifyErrorLong(err); } else { return stringifyErrorShort(err); } } if (typeof err === "string") { return err; } return JSON.stringify(err); } function stringifyErrorShort(err: Error): string { return `${err.name}: ${err.message}`; } function stringifyErrorLong(err: Error): string { const cause = err.cause === undefined ? "" : `\nCaused by ${stringify(err.cause, { verbose: true })}`; if (!err.stack) { return `${err.name}: ${err.message}${cause}`; } return `${err.stack}${cause}`; } ================================================ FILE: src/error_test.ts ================================================ // Copyright 2024 Deno Land Inc. All rights reserved. MIT license. 
import { stringify } from "./error.ts"; import { assert, assertEquals, assertStringIncludes } from "@std/assert"; Deno.test("stringify string", () => { assertEquals(stringify("test"), "test"); }); Deno.test("stringify number", () => { assertEquals(stringify(42), "42"); }); Deno.test("stringify object", () => { assertEquals(stringify({ foo: 42 }), '{"foo":42}'); }); Deno.test("stringify Error (verbose: false)", () => { const got = stringify(new Error("boom")); assertEquals(got, "Error: boom"); }); Deno.test("stringify Error (verbose: true)", () => { const got = stringify(new Error("boom"), { verbose: true }); assert(got.startsWith("Error: boom\n at "), `assert failed: ${got}`); }); Deno.test("stringify Error with cause (cause is also Error) (verbose: false)", () => { const e1 = new TypeError("e1"); const e2 = new SyntaxError("e2", { cause: e1 }); const got = stringify(e2); assertEquals(got, "SyntaxError: e2"); }); Deno.test("stringify Error with cause (cause is also Error) (verbose: true)", () => { const e1 = new TypeError("e1"); const e2 = new SyntaxError("e2", { cause: e1 }); const got = stringify(e2, { verbose: true }); assert( got.startsWith("SyntaxError: e2\n at "), `assert failed: ${got}`, ); assertStringIncludes(got, "Caused by TypeError: e1\n at "); }); Deno.test("stringify Error with cause (cause is number) (verbose: false)", () => { const e = new Error("e", { cause: 42 }); const got = stringify(e); assertEquals(got, "Error: e"); }); Deno.test("stringify Error with cause (cause is number) (verbose: true)", () => { const e = new Error("e", { cause: 42 }); const got = stringify(e, { verbose: true }); assert( got.startsWith("Error: e\n at "), `assert failed: ${got}`, ); assert( got.endsWith("Caused by 42"), `assert failed: ${got}`, ); }); ================================================ FILE: src/subcommands/api.ts ================================================ import type { Args } from "../args.ts"; import { API } from "../utils/mod.ts"; import 
TokenProvisioner from "../utils/access_token.ts"; import { error } from "../error.ts"; import { wait } from "../utils/spinner.ts"; const help = `Perform API calls to any endpoint of the Deploy API (ALPHA) EXAMPLES: Get the details of an organization: deployctl api organizations/04f19625-35d3-4c05-857e-bcaa3b0af374 Create a project in an organization: deployctl api --method=POST --body='{"name": "my-project"}' organizations/04f19625-35d3-4c05-857e-bcaa3b0af374/projects You can find the specification of the API in https://apidocs.deno.com USAGE: deployctl api [OPTIONS] OPTIONS: -h, --help Prints this help information --method= HTTP method to use (defaults to GET) --body= Body of the request. The provided string is sent as is to the API --format= Output an overview of the response with the headers and the (possibly truncated) body, or just the body (verbatim). Defaults to 'overview' when stdout is a tty, and 'body' otherwise. --token= The API token to use (defaults to auto-provisioned token) `; export default async function (args: Args): Promise { if (args.help) { console.log(help); Deno.exit(0); } let endpoint = args._.shift()?.toString(); if (!endpoint) { error( "Missing endpoint positional argument. USAGE: deployctl api ", ); } let format: "overview" | "body"; switch (args.format) { case "overview": case "body": format = args.format; break; case undefined: format = Deno.stdout.isTerminal() ? "overview" : "body"; break; default: error( `Invalid format '${args.format}'. Supported values for the --format option are 'overview' or 'body'`, ); } if (!endpoint.startsWith("/")) { endpoint = `/${endpoint}`; } if (!/^\/v\d+\//.test(endpoint)) { endpoint = `/v1${endpoint}`; } const method = (args.method || "GET").toUpperCase(); const spinner = wait(`Requesting API endpoint '${endpoint}'...`).start(); const api = args.token ? 
API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); try { const response = await api.request(endpoint, { method, body: args.body, }); spinner.succeed(`Received response from the API`); switch (format) { case "overview": { const body = response.headers.get("Content-Type") === "application/json" ? await response.json() : await response.text(); const headers = response.headers; console.log("-----[ HEADERS ]-----"); console.log(method, response.url); console.log("Status:", response.status); console.log(headers); console.log("-----[ BODY ]--------"); console.log(body); break; } case "body": { console.log(await response.text()); break; } } } catch (err) { error(err); } } ================================================ FILE: src/subcommands/deploy.ts ================================================ // Copyright 2021 Deno Land Inc. All rights reserved. MIT license. import type { Spinner } from "@denosaurs/wait"; import { fromFileUrl } from "@std/path/from_file_url"; import { envVarsFromArgs } from "../utils/env_vars.ts"; import { wait } from "../utils/spinner.ts"; import configFile from "../config_file.ts"; import { error } from "../error.ts"; import { API, APIError, endpoint } from "../utils/api.ts"; import type { ManifestEntry } from "../utils/api_types.ts"; import { parseEntrypoint } from "../utils/entrypoint.ts"; import { containsEntryInManifest, convertPatternToRegExp, walk, } from "../utils/manifest.ts"; import TokenProvisioner from "../utils/access_token.ts"; import type { Args as RawArgs } from "../args.ts"; import organization from "../utils/organization.ts"; import { relative } from "@std/path/relative"; import { yellow } from "@std/fmt/colors"; const help = `deployctl deploy Deploy a script with static files to Deno Deploy. Basic usage: deployctl deploy By default, deployctl will guess the project name based on the Git repo or directory it is in. 
Similarly, it will guess the entrypoint by looking for files with common entrypoint names (main.ts, src/main.ts, etc). After the first deployment, the settings used will be stored in a config file (by default deno.json). You can specify the project name and/or the entrypoint using the --project and --entrypoint arguments respectively: deployctl deploy --project=helloworld --entrypoint=src/entrypoint.ts By default, deployctl deploys all the files in the current directory (recursively, except node_modules directories). You can customize this behaviour using the --include and --exclude arguments (also supported in the config file). Here are some examples: - Include only source and static files: deployctl deploy --include=./src --include=./static - Include only Typescript files: deployctl deploy --include=**/*.ts - Exclude local tooling and artifacts deployctl deploy --exclude=./tools --exclude=./benches A common pitfall is to not include the source code modules that need to be run (entrypoint and dependencies). The following example will fail because main.ts is not included: deployctl deploy --include=./static --entrypoint=./main.ts The entrypoint can also be a remote script. A common use case for this is to deploy an static site using std/http/file_server.ts (more details in https://docs.deno.com/deploy/tutorials/static-site ): deployctl deploy --entrypoint=jsr:@std/http/file_server You can set env variables for deployments to have access using Deno.env. You can use --env to set individual environment variables, or --env-file to load one or more environment files. These options can be combined and used multiple times: deployctl deploy --env-file --env-file=.other-env --env=DEPLOYMENT_TS=$(date +%s) Be aware that the env variables set with --env and --env-file are merged with the env variables configured for the project. 
If this does not suit your needs, please report your feedback at https://github.com/denoland/deploy_feedback/issues/ USAGE: deployctl deploy [OPTIONS] [] OPTIONS: --exclude= Prevent the upload of these comma-separated paths. Can be used multiple times. Globs are supported --include= Only upload files in these comma-separated paths. Can be used multiple times. Globs are supported --import-map= Path to the import map file to use. -h, --help Prints this help information --prod Create a production deployment (default is preview deployment except the first deployment) -p, --project= The project in which to deploy. If it does not exist yet, it will be created (see --org). --org= The organization in which to create the project. Defaults to the user's personal organization --entrypoint= The file that Deno Deploy will run. Also available as positional argument, which takes precedence --env= Set individual environment variables in a KEY=VALUE format. Can be used multiple times --env-file[=FILE] Set environment variables using a dotenv file. If the file name is not provided, defaults to '.env'. Can be used multiple times --token= The API token to use (defaults to DENO_DEPLOY_TOKEN env var) --dry-run Dry run the deployment process. --config= Path to the file from where to load DeployCTL config. Defaults to 'deno.json' --save-config Persist the arguments used into the DeployCTL config file --color= Enable or disable colored output. Defaults to 'auto' (colored when stdout is a tty) `; export interface Args { help: boolean; static: boolean; prod: boolean; exclude: string[]; include: string[]; token: string | null; project: string | null; org?: string; entrypoint: string | null; importMap: string | null; dryRun: boolean; config: string | null; saveConfig: boolean; } export default async function (rawArgs: RawArgs): Promise { const positionalEntrypoint: string | null = typeof rawArgs._[0] === "string" ? 
rawArgs._[0] : null; const args: Args = { help: !!rawArgs.help, static: !!rawArgs.static, prod: !!rawArgs.prod, token: rawArgs.token ? String(rawArgs.token) : null, project: rawArgs.project ? String(rawArgs.project) : null, org: rawArgs.org, entrypoint: positionalEntrypoint !== null ? positionalEntrypoint : rawArgs["entrypoint"] ? String(rawArgs["entrypoint"]) : null, importMap: rawArgs["import-map"] ? String(rawArgs["import-map"]) : null, exclude: rawArgs.exclude.flatMap((e) => e.split(",")), include: rawArgs.include.flatMap((i) => i.split(",")), dryRun: !!rawArgs["dry-run"], config: rawArgs.config ? String(rawArgs.config) : null, saveConfig: !!rawArgs["save-config"], }; if (args.help) { console.log(help); Deno.exit(0); } if (args.entrypoint === null) { error( "Unable to guess the entrypoint of this project. Use the --entrypoint argument to provide one.", ); } if (rawArgs._.length > 1) { error("Too many positional arguments given."); } if (args.project === null) { error( "Unable to guess a project name for this project. Use the --project argument to provide one.", ); } const opts = { entrypoint: args.entrypoint, importMapUrl: args.importMap === null ? 
null : await parseEntrypoint(args.importMap, undefined, "import map") .catch((e) => error(e)), static: args.static, prod: args.prod, token: args.token, project: args.project, org: args.org, include: args.include, exclude: args.exclude, dryRun: args.dryRun, config: args.config, saveConfig: args.saveConfig, envVars: await envVarsFromArgs(rawArgs), }; await deploy(opts); } interface DeployOpts { entrypoint: string; importMapUrl: URL | null; static: boolean; prod: boolean; exclude: string[]; include: string[]; token: string | null; project: string; org?: string; dryRun: boolean; config: string | null; saveConfig: boolean; envVars: Record | null; } async function deploy(opts: DeployOpts): Promise { let url = await parseEntrypoint(opts.entrypoint).catch(error); if (opts.dryRun) { wait("").start().info("Performing dry run of deployment"); } const projectInfoSpinner = wait( `Fetching project '${opts.project}' information...`, ).start(); const api = opts.token ? API.fromToken(opts.token) : API.withTokenProvisioner(TokenProvisioner); let projectIsEmpty = false; let project = await api.getProject(opts.project); if (project === null) { const org = opts.org ? await organization.getByNameOrCreate(api, opts.org) : null; projectInfoSpinner.stop(); const projectCreationSpinner = wait( `Project '${opts.project}' not found. 
Creating...`, ).start(); try { project = await api.createProject(opts.project, org?.id); } catch (e) { error(e); } projectCreationSpinner.succeed(`Created new project '${opts.project}'.`); wait({ text: "", indent: 3 }).start().info( `You can configure the name, env vars, custom domains and more in ${endpoint()}/projects/${project.name}/settings`, ); projectIsEmpty = true; } else { if (opts.org && project.organization.name === null) { projectInfoSpinner.fail( `The project is in your personal organization and you requested the org '${opts.org}' in the args`, ); Deno.exit(1); } else if (opts.org && project.organization.name !== opts.org) { projectInfoSpinner.fail( `The project is in the organization '${project.organization.name}' and you requested the org '${opts.org}' in the args`, ); Deno.exit(1); } const buildsPage = await api.listDeployments(project.id, 0, 1); if (buildsPage === null) { projectInfoSpinner.fail("Project deployments details not found."); return Deno.exit(1); } projectInfoSpinner.succeed(`Deploying to project ${project.name}.`); if (buildsPage.list.length === 0) { projectIsEmpty = true; } } if (projectIsEmpty) { opts.prod = true; wait({ text: "", indent: 3 }).start().info( "The project does not have a deployment yet. 
Automatically pushing initial deployment to production (use --prod for further updates).", ); } const cwd = Deno.cwd(); if (url.protocol === "file:") { const path = fromFileUrl(url); if (!path.startsWith(cwd)) { wait("").start().fail(`Entrypoint: ${path}`); error("Entrypoint must be in the current working directory."); } else { wait("").start().succeed(`Entrypoint: ${path}`); } const entrypoint = path.slice(cwd.length); url = new URL(`file:///src${entrypoint}`); } let importMapUrl = opts.importMapUrl; if (importMapUrl && importMapUrl.protocol === "file:") { const path = fromFileUrl(importMapUrl); if (!path.startsWith(cwd)) { error("Import map must be in the current working directory."); } const entrypoint = path.slice(cwd.length); importMapUrl = new URL(`file:///src${entrypoint}`); } let uploadSpinner: Spinner | null = null; const files = []; let manifest: { entries: Record } | undefined = undefined; if (opts.static) { wait("").start().info(`Uploading all files from the current dir (${cwd})`); const assetSpinner = wait("Finding static assets...").start(); const include = opts.include.map(convertPatternToRegExp); const exclude = opts.exclude.map(convertPatternToRegExp); const { manifestEntries: entries, hashPathMap: assets } = await walk( cwd, cwd, { include, exclude }, ); assetSpinner.succeed( `Found ${assets.size} asset${assets.size === 1 ? "" : "s"}.`, ); // If the import map is specified but not in the manifest, error out. if ( opts.importMapUrl !== null && !containsEntryInManifest( entries, relative(cwd, fromFileUrl(opts.importMapUrl)), ) ) { error( `Import map ${opts.importMapUrl} not found in the assets to be uploaded. Please check --include and --exclude options to make sure the import map is included.`, ); } // If the config file is present but not in the manifest, show a warning // that any import map settings in the config file will not be used. 
if ( opts.importMapUrl === null && opts.config !== null && !containsEntryInManifest( entries, relative(cwd, opts.config), ) ) { wait("").start().warn( yellow( `Config file ${opts.config} not found in the assets to be uploaded; any import map settings in the config file will not be applied during deployment. If this is not your intention, please check --include and --exclude options to make sure the config file is included.`, ), ); } uploadSpinner = wait("Determining assets to upload...").start(); const neededHashes = await api.projectNegotiateAssets(project.id, { entries, }); for (const hash of neededHashes) { const path = assets.get(hash); if (path === undefined) { error(`Asset ${hash} not found.`); } const data = await Deno.readFile(path); files.push(data); } if (files.length === 0) { uploadSpinner.succeed("No new assets to upload."); uploadSpinner = null; } else { uploadSpinner.text = `${files.length} new asset${ files.length === 1 ? "" : "s" } to upload.`; } manifest = { entries }; } if (opts.dryRun) { uploadSpinner?.succeed(uploadSpinner?.text); return; } let deploySpinner: Spinner | null = null; const req = { url: url.href, importMapUrl: importMapUrl ? importMapUrl.href : null, production: opts.prod, manifest, }; const progress = await api.pushDeploy(project.id, req, files); try { for await (const event of progress) { switch (event.type) { case "staticFile": { const percentage = (event.currentBytes / event.totalBytes) * 100; uploadSpinner!.text = `Uploading ${files.length} asset${ files.length === 1 ? "" : "s" }... (${percentage.toFixed(1)}%)`; break; } case "load": { if (uploadSpinner) { uploadSpinner.succeed( `Uploaded ${files.length} new asset${ files.length === 1 ? "" : "s" }.`, ); uploadSpinner = null; } if (deploySpinner === null) { deploySpinner = wait("Deploying...").start(); } const progress = event.seen / event.total * 100; deploySpinner.text = `Deploying... 
(${progress.toFixed(1)}%)`; break; } case "uploadComplete": deploySpinner!.text = `Finishing deployment...`; break; case "success": { let domains; if (opts.envVars) { deploySpinner!.text = "Setting environment variables..."; // Hack while Deno Deploy implements settings env variables during deployment_with_assets const redeployed = await api.redeployDeployment(event.id, { prod: opts.prod, env_vars: opts.envVars, }); // NULL SAFETY: deployment was just created domains = redeployed!.domains; await api.deleteDeployment(event.id); } else { domains = event.domainMappings.map((m) => m.domain); } const deploymentKind = opts.prod ? "Production" : "Preview"; deploySpinner!.succeed(`${deploymentKind} deployment complete.`); // We want to store the project id even if user provided project name // to facilitate project renaming. opts.project = project.id; await configFile.maybeWrite(opts.config, opts, opts.saveConfig); console.log("\nView at:"); for (const domain of domains) { console.log(` - https://${domain}`); } break; } case "error": if (uploadSpinner) { uploadSpinner.fail(`Upload failed.`); uploadSpinner = null; } if (deploySpinner) { deploySpinner.fail(`Deployment failed.`); deploySpinner = null; } error(event.ctx); } } } catch (err: unknown) { if (err instanceof APIError) { if (uploadSpinner) { uploadSpinner.fail(`Upload failed.`); uploadSpinner = null; } if (deploySpinner) { deploySpinner.fail(`Deployment failed.`); deploySpinner = null; } error(err.toString()); } error(String(err)); } } ================================================ FILE: src/subcommands/deployments.ts ================================================ import type { Args } from "../args.ts"; import { API, endpoint } from "../utils/api.ts"; import TokenProvisioner from "../utils/access_token.ts"; import { envVarsFromArgs } from "../utils/env_vars.ts"; import { wait } from "../utils/spinner.ts"; import type { Build, BuildsPage, Cron, Database, DeploymentProgressError, Organization, Project, } from 
"../utils/api_types.ts"; import { bold, cyan, green, magenta, red, stripAnsiCode, yellow, } from "@std/fmt/colors"; import type { Spinner } from "@denosaurs/wait"; import * as tty from "@denosaurs/tty"; import { fromFileUrl } from "@std/path/from_file_url"; import { error } from "../error.ts"; import { renderCron } from "../utils/crons.ts"; import { renderTimeDelta } from "../utils/time.ts"; const help = `Manage deployments in Deno Deploy ## SHOW The "deployments show" subcommand is used to see all the details of a deployment. The simplest form of the command will show the details of the production deployment of the project you are currently in (project will be picked up from the config file): deployctl deployments show And you can also navigate the list of deployments using --prev and --next. --prev will show you 1 deployment before the current production deployment: deployctl deployments show --prev To see the deployment before that, you can either add another --prev, or use --prev=2: deployctl deployments show --prev --prev You can also see the production deployment of any project using --project: deployctl deployments show --project=my-other-project Or just show the details of a specific deployment, of any project, using --id. This can also be combined with --prev and --next too: deployctl deployments show --id=p63c39ck5feg --next ## List The "deployments list" subcommand is used to list the deployments of a project. The simplest form of the command will list the first 20 deployments of the project you are currently in (project will be picked up from the config file): deployctl deployments list You can list the rest of the deployments using --page: deployctl deployments list --page=2 You can specify the project to list deployments of with the --project option: deployctl deployments list --project=my-other-project ## Redeploy The "deployments redeploy" subcommand creates a new deployment reusing the build of an existing deployment. 
One important principle to understand when using Deno Deploy is that deployments are immutable. This includes the source code but also the env vars, domain mappings*, the KV database, crons, etc. To change any of these associated resources for an existing deployment, you must redeploy it. For example, to promote a preview deployment to production, use the --prod option: deployctl deployments redeploy --prod If this is a GitHub deployment, it will have 2 databases, one for prod deployments and one for preview deployments. When promoting a preview deployment to prod, by default it will automatically switch also to the prod database. You can control the database with the --db option: deployctl deployments redeploy --prod --db=preview If your organization has custom databases, you can also set them by UUID: deployctl deployments redeploy --db=5261e096-f9aa-4b72-8440-1c2b5b553def Lastly, environment variables can also be changed using the redeploy functionality. You can use --env to set individual environment variables, or --env-file to load one or more environment files: deployctl deployments redeploy --env-file --env-file=.other-env --env=DEPLOYMENT_TS=$(date +%s) Be aware that when changing env variables, only the env variables set during the redeployment will be used by the new deployment. Currently the project env variables are ignored during redeployment. If this does not suit your needs, please report your feedback at https://github.com/denoland/deploy_feedback/issues/ USAGE: deployctl deployments [OPTIONS] SUBCOMMANDS: show [ID] View details of a deployment. Specify the deployment with a positional argument or the --id option; otherwise, it will show the details of the current production deployment of the project specified in the config file or with the --project option. Use --next and --prev to fetch the deployments deployed after or before the specified (or production) deployment. list List the deployments of a project. Specify the project using --project. 
Pagination can be controlled with --page and --limit. delete [ID] Delete a deployment. Same options to select the deployment as the show subcommand apply (--id, --project, --next and --prev). redeploy [ID] Create a new deployment reusing the build of an existing deployment. You can change various resources associated with the original deployment using the options --prod, --db, --env and --env-file OPTIONS: -h, --help Prints this help information --id= [show,delete,redeploy] Select a deployment by id. -p, --project= [show,delete,redeploy] Select the production deployment of a project. Ignored if combined with --id [list] The project of which to list deployments. --next[=pos] [show,delete,redeploy] Modifier that selects a deployment deployed chronologically after the deployment selected with --id or --project Can be used multiple times (--next --next is the same as --next=2) --prev[=pos] [show,delete,redeploy] Modifier that selects a deployment deployed chronologically before the deployment selected with --id or --project Can be used multiple times (--prev --prev is the same as --prev=2) --page= [list] Page of the deployments list to fetch --limit= [list] Amount of deployments to include in the list --prod [redeploy] Set the production domain mappings to the new deployment. If the project has prod/preview databases and --db is not set this option also controls which database the new deployment uses. --db= [redeploy] Set the database of the new deployment. If not set, will use the preview database if it is a preview deployment and the project has a preview database, or production otherwise. --env= [redeploy] Set individual environment variables in a KEY=VALUE format. Can be used multiple times --env-file[=FILE] [redeploy] Set environment variables using a dotenv file. If the file name is not provided, defaults to '.env'. Can be used multiple times. --format= Output the deployment details in an overview or JSON-encoded. 
Defaults to 'overview' when stdout is a tty, and 'json' otherwise. --token= The API token to use (defaults to DENO_DEPLOY_TOKEN env var) --config= Path to the file from where to load DeployCTL config. Defaults to 'deno.json' --color= Enable or disable colored output. Defaults to 'auto' (colored when stdout is a tty) --force [delete] Automatically execute the command without waiting for confirmation. `; export default async function (args: Args): Promise { if (args.help) { console.log(help); Deno.exit(0); } const subcommand = args._.shift(); switch (subcommand) { case "list": await listDeployments(args); break; case "show": await showDeployment(args); break; case "delete": await deleteDeployment(args); break; case "redeploy": await redeployDeployment(args); break; default: console.error(help); Deno.exit(1); } } async function listDeployments(args: Args): Promise { if (!args.project) { error( "No project specified. Use --project to specify the project of which to list the deployments", ); } const relativeNext = args.next.reduce( (prev, next) => prev + parseInt(next || "1"), 0, ); if (Number.isNaN(relativeNext)) { error("Value of --next must be a number"); } const relativePrev = args.prev.reduce( (prev, next) => prev + parseInt(next || "1"), 0, ); if (Number.isNaN(relativePrev)) { error("Value of --prev must be a number"); } // User-facing page is 1-based. Paging in API is 0-based. const page = parseInt(args.page || "1") + relativeNext - relativePrev; if (Number.isNaN(page)) { error("Value of --page must be a number"); } if (page < 1) { error(`The page cannot be lower than 1. You asked for page '${page}'`); } const apiPage = page - 1; const limit = args.limit ? parseInt(args.limit) : undefined; if (Number.isNaN(limit)) { error("Value of --limit must be a number"); } let format: "overview" | "json"; switch (args.format) { case "overview": case "json": format = args.format; break; case undefined: format = Deno.stdout.isTerminal() ? 
"overview" : "json"; break; default: error( `Invalid format '${args.format}'. Supported values for the --format option are 'overview' or 'json'`, ); } const spinner = wait( `Fetching page ${page} of the list of deployments of project '${args.project}'...`, ) .start(); const api = args.token ? API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); const [buildsPage, project, databases] = await Promise.all([ api.listDeployments( args.project, apiPage, limit, ), api.getProject(args.project), api.getProjectDatabases(args.project), ]); if (!buildsPage || !project || !databases) { spinner.fail( `The project '${args.project}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } spinner.succeed( `Page ${page} of the list of deployments of the project '${args.project}' is ready`, ); if (buildsPage.list.length === 0) { wait("").warn(`Page '${page}' is empty`); return; } switch (format) { case "overview": renderListOverview( api, project, databases, buildsPage, ); break; case "json": console.log(JSON.stringify(buildsPage.list)); break; } } // TODO: Show if active (and maybe some stats?) async function showDeployment(args: Args): Promise { const api = args.token ? API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); let [deploymentId, projectId, build, project]: [ string, string | undefined, Build | null | undefined, Project | null | undefined, ] = await resolveDeploymentId( args, api, ); let databases: Database[] | null; let crons: Cron[] | null; const spinner = wait(`Fetching deployment '${deploymentId}' details...`) .start(); // Need to fetch project because the build.project does not include productionDeployment [build, project, databases, crons] = projectId ? await Promise.all([ build ? Promise.resolve(build) : api.getDeployment(deploymentId), project ? 
Promise.resolve(project) : api.getProject(projectId), api.getProjectDatabases(projectId), api.getDeploymentCrons(projectId, deploymentId), ]) : await api.getDeployment(deploymentId).then(async (build) => build ? [ build, ...await Promise.all([ api.getProject(build.project.id), api.getProjectDatabases(build.project.id), api.getDeploymentCrons(build.project.id, deploymentId), ]), ] : [null, null, null, null] ); if (!build) { spinner.fail( `The deployment '${deploymentId}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } if (!project) { spinner.fail( `The project '${projectId}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } if (!databases) { spinner.fail( `Failed to fetch the databases of project '${projectId}'`, ); return Deno.exit(1); } if (!crons) { spinner.fail( `Failed to fetch the crons of project '${projectId}'`, ); return Deno.exit(1); } let organization = project.organization; if (!organization.name && !organization.members) { // project.organization does not incude members array, and we need it for naming personal orgs organization = await api.getOrganizationById(organization.id); } spinner.succeed( `The details of the deployment '${build.deploymentId}' are ready:`, ); let format: "overview" | "json"; switch (args.format) { case "overview": case "json": format = args.format; break; case undefined: format = Deno.stdout.isTerminal() ? "overview" : "json"; break; default: error( `Invalid format '${args.format}'. Supported values for the --format option are 'overview' or 'json'`, ); } switch (format) { case "overview": renderShowOverview(build, project, organization, databases, crons); break; case "json": renderShowJson(build, project, organization, databases, crons); break; } } async function deleteDeployment(args: Args): Promise { const api = args.token ? 
API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); const [deploymentId, _projectId, _build, _project] = await resolveDeploymentId( args, api, ); const confirmation = args.force ? true : confirm( `${ magenta("?") } Are you sure you want to delete the deployment '${deploymentId}'?`, ); if (!confirmation) { wait("").fail("Delete canceled"); return; } const spinner = wait(`Deleting deployment '${deploymentId}'...`).start(); const deleted = await api.deleteDeployment(deploymentId); if (deleted) { spinner.succeed(`Deployment '${deploymentId}' deleted successfully`); } else { spinner.fail( `Deployment '${deploymentId}' not found, or you don't have access to it`, ); } } async function redeployDeployment(args: Args): Promise { const api = args.token ? API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); let [deploymentId, mProjectId, mBuild, mProject]: [ string, string | undefined, Build | null | undefined, Project | null | undefined, ] = await resolveDeploymentId( args, api, ); const spinnerPrep = wait(`Preparing redeployment of '${deploymentId}'...`) .start(); let mDatabases; [mBuild, mProject, mDatabases] = await Promise.all([ mBuild ? Promise.resolve(mBuild) : api.getDeployment(deploymentId), mProject === undefined && mProjectId ? api.getProject(mProjectId) : undefined, mProjectId ? api.getProjectDatabases(mProjectId) : undefined, ]); if (!mBuild) { spinnerPrep.fail( `The deployment '${deploymentId}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } const build = mBuild; const projectId = build.project.id; if (mProject === undefined || mDatabases === undefined) { // We didn't have projectId before. Now we do [mProject, mDatabases] = await Promise.all([ mProject ? Promise.resolve(mProject) : api.getProject(projectId), mDatabases ? 
Promise.resolve(mDatabases) : api.getProjectDatabases(projectId), ]); } if (!mProject) { spinnerPrep.fail( `The project '${projectId}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } const project = mProject; const databases = mDatabases; const alreadyProd = project.productionDeployment?.deploymentId === build.deploymentId; const prod = args.prod ?? alreadyProd; const prodDatabase = databases?.find((database) => deploymentDatabaseEnv(project, database) === "Production" ); const previewDatabase = databases?.find((database) => deploymentDatabaseEnv(project, database) === "Preview" ); const db = resolveDatabase( spinnerPrep, args, prod, project, prodDatabase, previewDatabase, ); const envVarsToAdd = await envVarsFromArgs(args) || {}; const addedEnvs = Object.keys(envVarsToAdd); // If the redeployment sets some env vars, the remaining env vars in the deployment are deleted const envVarsToRemove = build.deployment && addedEnvs.length > 0 ? Object.fromEntries( build.deployment.envVars .filter((env) => !addedEnvs.includes(env)) // HOME is always set by Deno Deploy .filter((env) => env !== "HOME") .map((key) => [key, null]), ) : {}; const removedEnvs = Object.keys(envVarsToRemove); const envVars = { ...envVarsToAdd, ...envVarsToRemove, }; spinnerPrep.succeed( `Redeployment of deployment '${deploymentId}' is ready to begin:`, ); const domainMappingDescription = prod ? "The new deployment will be the new production deployment" : "The new deployment will be a preview deployment"; wait({ text: "", indent: 3 }).start().info(domainMappingDescription); if (db) { const dbTag = db === prodDatabase?.databaseId ? "production" : db === previewDatabase?.databaseId ? 
"preview" : "custom"; wait({ text: "", indent: 3 }).start().info( `The new deployment will use the ${dbTag} database '${db}'`, ); } if (addedEnvs.length === 1) { wait({ text: "", indent: 3 }).start().info( `The new deployment will use the env variable ${addedEnvs[0]}`, ); } else if (addedEnvs.length > 1) { wait({ text: "", indent: 3 }).start().info( `The new deployment will use the env variables ${ addedEnvs.slice(0, -1).join(", ") } and ${addedEnvs.at(-1)}`, ); } if (removedEnvs.length === 1) { wait({ text: "", indent: 3 }).start().info( `The new deployment will stop using the env variable ${removedEnvs[0]}`, ); } else if (removedEnvs.length > 1) { wait({ text: "", indent: 3 }).start().info( `The new deployment will stop using the env variables ${ removedEnvs.slice(0, -1).join(", ") } and ${removedEnvs.at(-1)}`, ); } const spinner = wait(`Redeploying deployment '${deploymentId}'...`).start(); const params = { prod, env_vars: envVars, databases: db ? { default: db } : undefined, }; const redeployed = await api.redeployDeployment(deploymentId, params); if (redeployed) { spinner.succeed( `Deployment '${deploymentId}' redeployed as '${redeployed.id}' successfully`, ); } else { spinner.fail( `Deployment '${deploymentId}' not found, or you don't have access to it`, ); } } async function searchRelativeDeployment( deployments: AsyncGenerator, deploymentId: string, relativePos: number, ): Promise { const buffer = []; for await (const build of deployments) { if (relativePos === 0) { if (build.deploymentId === deploymentId) { return build; } } if (relativePos > 0) { if (build.deploymentId === deploymentId) { return buffer.pop(); } } if (relativePos < 0) { if (buffer.pop()?.deploymentId === deploymentId) { return build; } } buffer.unshift(build); // Truncates array at given length buffer.length = Math.abs(relativePos); } } function renderShowOverview( build: Build, project: Project, organization: Organization, databases: Database[], crons: Cron[], ) { const organizationName = 
organization.name && cyan(organization.name) || `${cyan(organization.members[0].user.name)} [personal]`; const buildError = deploymentError(build)?.ctx.replaceAll(/\s+/g, " "); const status = deploymentStatus(project, build); const coloredStatus = status === "Failed" ? red(bold(status.toUpperCase())) : status === "Pending" ? yellow(status) : status === "Production" ? green(bold(status)) : status; const database = deploymentDatabase(databases, build); const databaseEnv = database ? `${ greenProd(deploymentDatabaseEnv(project, database)) } (${database.databaseId})` : "n/a"; const entrypoint = deploymentEntrypoint(build); const domains = build.deployment?.domainMappings.map((domain) => `https://${domain.domain}`) .sort((a, b) => a.length - b.length) ?? []; if (domains.length === 0) { domains.push("n/a"); } console.log(); console.log(bold(build.deploymentId)); console.log(new Array(build.deploymentId.length).fill("-").join("")); console.log(`Status:\t\t${coloredStatus}`); if (buildError) { console.log(`Error:\t\t${buildError}`); } console.log( `Date:\t\t${deploymentRelativeDate(build)} ago (${ deploymentLocaleDate(build) })`, ); if ( build.deployment?.description && build.deployment.description !== build.relatedCommit?.message ) { console.log(`Description:\t${build.deployment.description}`); } console.log(`Project:\t${magenta(project.name)} (${project.id})`); console.log( `Organization:\t${organizationName} (${project.organizationId})`, ); console.log( `Domain(s):\t${domains.join("\n\t\t")}`, ); console.log(`Database:\t${databaseEnv}`); console.log(`Entrypoint:\t${entrypoint}`); console.log( `Env Vars:\t${build.deployment?.envVars.join("\n\t\t") ?? "n/a"}`, ); if (build.relatedCommit) { console.log(`Git`); console.log( ` Ref:\t\t${cyan(build.relatedCommit.branch ?? 
"??")} [${ build.relatedCommit.hash.slice(0, 7) }]`, ); console.log( ` Message:\t${build.relatedCommit.message.split("\n")[0]}`, ); console.log( ` Author:\t${build.relatedCommit.authorName} @${ magenta(build.relatedCommit.authorGithubUsername) } [mailto:${cyan(build.relatedCommit.authorEmail)}]`, ); console.log(` Url:\t\t${build.relatedCommit.url}`); } // The API only shows the data of the cron in the production deployment regardless of the deployment queried if (status === "Production" && crons.length > 0) { console.log( `Crons:\t\t${crons.map(renderCron).join("\n\t\t")}`, ); } } function renderShowJson( build: Build, project: Project, organization: Organization, databases: Database[], crons: Cron[], ) { console.log( JSON.stringify({ build, project, organization, databases, crons }), ); } async function renderListOverview( api: API, project: Project, databases: Database[], buildsPage: BuildsPage, ) { const sld = new URL(endpoint()).hostname.split(".").at(-2); for (;;) { const table = buildsPage.list.map((build) => { const status = deploymentStatus(project, build); const colorByStatus = (s: string) => status === "Failed" ? red(stripAnsiCode(s)) : status === "Production" ? green(s) : status === "Pending" ? yellow(s) : s; const database = deploymentDatabase(databases, build); const databaseEnv = database ? greenProd(deploymentDatabaseEnv(project, database)) : "n/a"; const relativeDate = stripAnsiCode( deploymentRelativeDate(build).split(", ")[0].trim(), ); const date = `${deploymentLocaleDate(build)} (${relativeDate})`; const row = { Deployment: colorByStatus(build.deploymentId), Date: colorByStatus(date), Status: colorByStatus(status), Database: colorByStatus(databaseEnv), Domain: colorByStatus( !isReady(status) ? "n/a" : `https://${project.name}-${build.deploymentId}.${sld}.dev`, ), Entrypoint: colorByStatus(deploymentEntrypoint(build)), ...build.relatedCommit ? { Branch: colorByStatus(build.relatedCommit.branch ?? 
"??"), Commit: colorByStatus(build.relatedCommit.hash.slice(0, 7)), } : {}, }; return row; }); renderTable(table); if (buildsPage.paging.page + 1 >= buildsPage.paging.totalPages) { return; } alert(`Press enter to fetch the next page`); tty.goUpSync(1, Deno.stdout); tty.clearDownSync(Deno.stdout); const nextPage = buildsPage.paging.page + 1; const spinner = wait( `Fetching page ${ // API page param is 0-based nextPage + 1} of the list of deployments of project '${project.name}'...`, ) .start(); const buildsNextPage = await api.listDeployments( project.id, nextPage, buildsPage.paging.limit, ); if (!buildsNextPage) { spinner.fail( `The project '${project.name}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } buildsPage = buildsNextPage; spinner.succeed( `Page ${ buildsPage.paging.page + 1 } of the list of deployments of the project '${project.name}' is ready`, ); } } function isCurrentProd(project: Project, build: Build): boolean { return project.productionDeployment?.id === build.id; } function deploymentError(build: Build): DeploymentProgressError | undefined { return build.logs.find((log): log is DeploymentProgressError => log.type === "error" ); } function deploymentStatus( project: Project, build: Build, ): DeploymentStatus { const isError = deploymentError(build) !== undefined; const isPending = !isError && (build.deployment === null || build.deployment.domainMappings.length === 0); return isError ? "Failed" : isPending ? "Pending" : isCurrentProd(project, build) ? 
"Production" : "Preview"; } function isReady(status: DeploymentStatus): boolean { return ["Production", "Preview"].includes(status); } function deploymentDatabase( databases: Database[], build: Build, ): Database | undefined { return databases.find((db) => db.databaseId === build.deployment?.kvDatabases["default"] ); } function deploymentLocaleDate(build: Build): string { return new Date(build.createdAt).toLocaleString(navigator.language, { timeZoneName: "short", }); } function deploymentRelativeDate(build: Build): string { const createdAt = new Date(build.createdAt); return renderTimeDelta(new Date().getTime() - createdAt.getTime()); } function deploymentEntrypoint(build: Build): string { return build.deployment ? build.deployment.url.startsWith("https://") ? build.deployment.url : fromFileUrl(build.deployment.url).replace("/src/", "") : "n/a"; } function deploymentDatabaseEnv( project: Project, database: Database, ): DatabaseEnv { return project.git && project.git.productionBranch !== database!.branch ? "Preview" : "Production"; } function renderTable(table: Record[]) { const headers = []; for (const row of table) { for (const [i, key] of Object.keys(row).entries()) { headers[i] = key; } } const widths: number[] = []; for (const rowData of table) { for (const [i, value] of Object.values(rowData).entries()) { widths[i] = Math.max( widths[i] ?? 
0, stripAnsiCode(value).length, headers[i].length, ); widths[i] = widths[i] + widths[i] % 2; } } const headerRow = headers.map((header, i) => { const pad = " ".repeat( Math.max(widths[i] - stripAnsiCode(header).length, 0) / 2, ); return `${pad}${header}${pad}`.padEnd(widths[i], " "); }).join(" \u2502 "); const divisor = "\u2500".repeat( widths.reduce((prev, next) => prev + next, 0) + (headers.length - 1) * 3, ); console.log(`\u250c\u2500${divisor}\u2500\u2510`); console.log(`\u2502 ${headerRow} \u2502`); console.log(`\u251c\u2500${divisor}\u2500\u2524`); for (const rowData of table) { const row = Array.from(Object.values(rowData).entries(), ([i, cell]) => { const pad = " ".repeat(widths[i] - stripAnsiCode(cell).length); return `${cell}${pad}`; }).join(" \u2502 "); console.log(`\u2502 ${row} \u2502`); } console.log(`\u2514\u2500${divisor}\u2500\u2518`); } async function resolveDeploymentId( args: Args, api: API, ): Promise< [DeploymentId, ProjectId | undefined, Build | undefined, Project | undefined] > { const deploymentIdArg = args._.shift()?.toString() || args.id; // Ignore --project if user also provided --id const projectIdArg = deploymentIdArg ? undefined : args.project; let deploymentId, projectId: string | undefined, build: Build | undefined, project: Project | undefined; if (deploymentIdArg) { deploymentId = deploymentIdArg; } else { // Default to showing the production deployment of the project or the last if (!projectIdArg) { error( "No deployment or project specified. 
Use --id or --project ", ); } projectId = projectIdArg; if (args.last) { const spinner = wait( `Searching the last deployment of project '${projectId}'...`, ).start(); const buildsPage = await api.listDeployments(projectId, 0, 1); if (!buildsPage) { spinner.fail( `The project '${projectId}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } if (buildsPage.list.length === 0) { spinner.fail( `The project '${projectId}' does not have any deployment yet`, ); return Deno.exit(1); } deploymentId = buildsPage.list[0].deploymentId; spinner.succeed( `The last deployment of the project '${projectId}' is '${deploymentId}'`, ); } else { const spinner = wait( `Searching the production deployment of project '${projectId}'...`, ).start(); const maybeProject = await api.getProject(projectId); if (!maybeProject) { spinner.fail( `The project '${projectId}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } project = maybeProject; if (!project.productionDeployment) { spinner.fail( `Project '${project.name}' does not have a production deployment. 
Use --id to specify the deployment to show`, ); return Deno.exit(1); } deploymentId = project.productionDeployment.deploymentId; spinner.succeed( `The production deployment of the project '${project.name}' is '${deploymentId}'`, ); } } if (args.prev.length !== 0 || args.next.length !== 0) { // Search the deployment relative to the specified deployment if (!projectId) { // Fetch the deployment specified with --id, to know of which project to search the relative deployment // If user didn't use --id, they must have used --project, thus we already know the project-id const spinner_ = wait(`Fetching deployment '${deploymentId}'...`) .start(); const specifiedDeployment = await api.getDeployment(deploymentId); if (!specifiedDeployment) { spinner_.fail( `The deployment '${deploymentId}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } spinner_.succeed(`Deployment '${deploymentId}' found`); projectId = specifiedDeployment.project.id; } let relativePos = 0; for (const prev of args.prev) { relativePos -= parseInt(prev || "1"); } for (const next of args.next) { relativePos += parseInt(next || "1"); } if (Number.isNaN(relativePos)) { error("Value of --next and --prev must be a number"); } const relativePosString = relativePos.toLocaleString(navigator.language, { signDisplay: "exceptZero", }); const spinner = wait( `Searching the deployment ${relativePosString} relative to '${deploymentId}'...`, ).start(); const maybeBuild = await searchRelativeDeployment( api.listAllDeployments(projectId), deploymentId, relativePos, ); if (!maybeBuild) { spinner.fail( `The deployment '${deploymentId}' does not have a deployment ${relativePosString} relative to it`, ); return Deno.exit(1); } build = maybeBuild; spinner.succeed( `The deployment ${relativePosString} relative to '${deploymentId}' is '${build.deploymentId}'`, ); deploymentId = build.deploymentId; } return [deploymentId, projectId, build, project]; } function resolveDatabase( spinner: Spinner, args: Args, 
prod: boolean, project: Project, prodDatabase: Database | undefined, previewDatabase: Database | undefined, ): string | undefined { let db; switch (args.db?.toLowerCase().trim()) { case "prod": case "production": { if (!prodDatabase) { spinner.fail( `Project '${project.name}' does not have a production database`, ); return Deno.exit(1); } db = prodDatabase.databaseId; break; } case "preview": { if (!previewDatabase) { spinner.fail( `Project '${project.name}' does not have a preview database`, ); return Deno.exit(1); } db = previewDatabase.databaseId; break; } default: db = args.db; } if (!db) { // For GitHub deployments, Deploy assigns the branch database also during redeployment // Unless the user is explicit about the db, we want to maintain the invariant status == databaseEnv if (prod) { db = prodDatabase?.databaseId; } else { db = previewDatabase?.databaseId; } } return db; } function greenProd(s: "Production" | string): string { return s === "Production" ? green(s) : s; } type DeploymentStatus = "Failed" | "Pending" | "Production" | "Preview"; type DatabaseEnv = "Production" | "Preview"; type DeploymentId = string; type ProjectId = string; ================================================ FILE: src/subcommands/logs.ts ================================================ // Copyright 2021 Deno Land Inc. All rights reserved. MIT license. import type { Args } from "../args.ts"; import { wait } from "../utils/spinner.ts"; import { error } from "../error.ts"; import { API, APIError } from "../utils/api.ts"; import type { Project } from "../utils/api_types.ts"; import TokenProvisioner from "../utils/access_token.ts"; const help = `deployctl logs View logs for the given project. It supports both live logs where the logs are streamed to the console as they are generated, and query persisted logs where the logs generated in the past are fetched. 
To show the live logs of a project's latest deployment: deployctl logs --project=helloworld deployctl logs helloworld To show the live logs of a particular deployment: deployctl logs --project=helloworld --deployment=1234567890ab To show the live error & info level logs of the production deployment generated in particular regions: deployctl logs --project=helloworld --prod --levels=error,info --regions=region1,region2 To show the logs generated within the past two hours, up until 30 minutes ago, and containing the word "foo": [Linux] deployctl logs --project=helloworld --since=$(date -Iseconds --date='2 hours ago') --until=$(date -Iseconds --date='30 minutes ago') --grep=foo [macOS] deployctl logs --project=helloworld --since=$(date -Iseconds -v-2H) --until=$(date -Iseconds -v-30M) --grep=foo USAGE: deployctl logs [OPTIONS] [] OPTIONS: --deployment= The id of the deployment you want to get the logs (defaults to latest deployment) --prod Select the production deployment -p, --project=NAME The project you want to get the logs --token=TOKEN The API token to use (defaults to DENO_DEPLOY_TOKEN env var) --since= The start time of the logs you want to get. RFC3339 format (e.g. 2023-07-17T06:10:38+09:00) is supported. NOTE: Logs generated over 24 hours ago are not available --until= The end time of the logs you want to get. RFC3339 format (e.g. 2023-07-17T06:10:38+09:00) is supported. --grep= Filter logs by a word Multiple words can be specified for AND search. For example, "--grep=foo --grep=bar" will match logs containing both "foo" and "bar" --levels= Filter logs by log levels (defaults to all log levels) Mutliple levels can be specified, e.g. --levels=info,error --regions= Filter logs by regions (defaults to all regions) Multiple regions can be specified, e.g. 
--regions=region1,region2 --limit= Limit the number of logs to return (defualts to 100) This flag is effective only when --since and/or --until is specified `; export interface LogSubcommandArgs { help: boolean; prod: boolean; token: string | null; deployment: string | null; project: string | null; since: Date | null; until: Date | null; grep: string[]; levels: string[] | null; regions: string[] | null; limit: number; } type LogOptsBase = { prod: boolean; deploymentId: string | null; projectId: string; grep: string[]; levels: string[] | null; regions: string[] | null; }; type LiveLogOpts = LogOptsBase; type QueryLogOpts = LogOptsBase & { since: Date | null; until: Date | null; limit: number; }; export default async function (args: Args): Promise { const logSubcommandArgs = parseArgsForLogSubcommand(args); if (logSubcommandArgs.help) { console.log(help); Deno.exit(0); } if (logSubcommandArgs.project === null) { console.error(help); error("Missing project ID."); } if (args._.length > 1) { console.error(help); error("Too many positional arguments given."); } if (logSubcommandArgs.prod && logSubcommandArgs.deployment) { error( "You can't select a deployment and choose production flag at the same time", ); } if ( logSubcommandArgs.since !== null && logSubcommandArgs.until !== null && logSubcommandArgs.since >= logSubcommandArgs.until ) { error("--since must be earlier than --until"); } const api = logSubcommandArgs.token ? 
API.fromToken(logSubcommandArgs.token)
    : API.withTokenProvisioner(TokenProvisioner);
  const { regionCodes } = await api.getMetadata();
  if (logSubcommandArgs.regions !== null) {
    const invalidRegions = getInvalidRegions(
      logSubcommandArgs.regions,
      regionCodes,
    );
    if (invalidRegions.length > 0) {
      invalidRegionError(invalidRegions, regionCodes);
    }
  }

  // Live mode streams logs as they arrive; query mode fetches a past window.
  // A time bound (--since/--until) implies query mode.
  const liveLogMode = logSubcommandArgs.since === null &&
    logSubcommandArgs.until === null;
  if (liveLogMode) {
    await liveLogs(api, {
      prod: logSubcommandArgs.prod,
      deploymentId: logSubcommandArgs.deployment,
      projectId: logSubcommandArgs.project,
      grep: logSubcommandArgs.grep,
      levels: logSubcommandArgs.levels,
      regions: logSubcommandArgs.regions,
    });
  } else {
    await queryLogs(api, {
      prod: logSubcommandArgs.prod,
      deploymentId: logSubcommandArgs.deployment,
      projectId: logSubcommandArgs.project,
      grep: logSubcommandArgs.grep,
      levels: logSubcommandArgs.levels,
      regions: logSubcommandArgs.regions,
      since: logSubcommandArgs.since,
      until: logSubcommandArgs.until,
      limit: logSubcommandArgs.limit,
    });
  }
}

/** Returns the regions in `specifiedRegions` not listed in `availableRegions`. */
function getInvalidRegions(
  specifiedRegions: string[],
  availableRegions: string[],
): string[] {
  return specifiedRegions.filter((region) =>
    !availableRegions.includes(region)
  );
}

/** Reports invalid --regions values along with the valid choices, then exits. */
function invalidRegionError(
  invalidRegions: string[],
  availableRegions: string[],
): never {
  const invalid = `--regions contains invalid region(s): ${
    invalidRegions.join(", ")
  }`;
  const availableRegionsList = availableRegions.map((r) => `- ${r}`).join("\n");
  const available = `HINT: Available regions are:\n${availableRegionsList}`;
  error(`${invalid}\n${available}`);
}

/** Normalizes raw CLI arguments into a LogSubcommandArgs structure. */
export function parseArgsForLogSubcommand(args: Args): LogSubcommandArgs {
  const DEFAULT_LIMIT = 100;
  const limit = args.limit ?
parseInt(args.limit) : DEFAULT_LIMIT; let since: Date | null = null; if (args.since !== undefined) { since = new Date(args.since); if (Number.isNaN(since.valueOf())) { console.error(help); error("Invalid format found in --since"); } } let until: Date | null = null; if (args.until !== undefined) { until = new Date(args.until); if (Number.isNaN(until.valueOf())) { console.error(help); error("Invalid format found in --until"); } } let logLevels: string[] | null = null; if (args.levels !== undefined) { logLevels = args.levels.split(","); } let regions: string[] | null = null; if (args.regions !== undefined) { regions = args.regions.split(","); } let project: string | null = null; if (args.project !== undefined) { project = args.project; } else if (typeof args._[0] === "string") { project = args._[0]; } return { help: !!args.help, prod: !!args.prod, token: args.token ? String(args.token) : null, deployment: args.deployment ? String(args.deployment) : null, project, since, until, grep: args.grep, levels: logLevels, regions, limit: Number.isNaN(limit) ? DEFAULT_LIMIT : limit, }; } async function fetchProjectInfo( api: API, projectId: string, onFailure: (msg: string) => never, ): Promise { const project = await api.getProject(projectId); if (project === null) { onFailure("Project not found."); } const projectDeployments = await api.listDeployments(projectId); if (projectDeployments === null) { onFailure("Project not found."); } return project; } async function liveLogs(api: API, opts: LiveLogOpts): Promise { const projectSpinner = wait("Fetching project information...").start(); const project = await fetchProjectInfo(api, opts.projectId, (msg) => { projectSpinner.fail(msg); Deno.exit(1); }); if (opts.prod) { if (!project.hasProductionDeployment) { projectSpinner.fail("This project doesn't have a production deployment"); Deno.exit(1); } opts.deploymentId = project.productionDeployment?.id ?? 
null; } projectSpinner.succeed(`Project: ${project.name}`); const logs = opts.deploymentId ? await api.getLogs(opts.projectId, opts.deploymentId) : await api.getLogs(opts.projectId, "latest"); if (logs === null) { projectSpinner.fail("Project not found."); Deno.exit(1); } try { for await (const log of logs) { if (log.type === "ready" || log.type === "ping") { continue; } if (opts.grep.some((word) => !log.message.includes(word))) { continue; } if (opts.levels !== null && !opts.levels.includes(log.level)) { continue; } if (opts.regions !== null && !opts.regions.includes(log.region)) { continue; } printLog(log.level, log.time, log.region, log.message); } } catch (err: unknown) { if ( err instanceof APIError ) { error(err.toString()); } } finally { console.log("%cconnection closed", "color: red"); } } async function queryLogs(api: API, opts: QueryLogOpts): Promise { const projectSpinner = wait("Fetching project information...").start(); const project = await fetchProjectInfo(api, opts.projectId, (msg) => { projectSpinner.fail(msg); Deno.exit(1); }); if (opts.prod) { if (!project.hasProductionDeployment) { projectSpinner.fail("This project doesn't have a production deployment"); Deno.exit(1); } opts.deploymentId = project.productionDeployment?.id ?? null; } projectSpinner.succeed(`Project: ${project.name}`); const logSpinner = wait("Fetching logs...").start(); try { const { logs } = await api.queryLogs( opts.projectId, opts.deploymentId ?? "latest", { regions: opts.regions ?? undefined, levels: opts.levels ?? undefined, since: opts.since?.toISOString(), until: opts.until?.toISOString(), q: opts.grep.length > 0 ? 
opts.grep : undefined,
        limit: opts.limit,
      },
    );
    if (logs.length === 0) {
      logSpinner.fail("No logs found matching the provided condition");
      return;
    }
    logSpinner.succeed(`Found ${logs.length} logs`);
    for (const log of logs) {
      printLog(log.level, log.timestamp, log.region, log.message);
    }
  } catch (err: unknown) {
    logSpinner.fail("Failed to fetch logs");
    if (err instanceof APIError) {
      error(err.toString());
    } else {
      // Unexpected failure: rethrow instead of swallowing it.
      throw err;
    }
  }
}

// Prints one log line: colored timestamp, region badge, and the message
// colored according to its log level.
function printLog(
  logLevel: string,
  timestamp: string,
  region: string,
  message: string,
) {
  const color = getLogColor(logLevel);
  console.log(
    `%c${timestamp} %c${region}%c ${message.trim()}`,
    "color: aquamarine",
    "background-color: grey",
    `color: ${color}`,
  );
}

// Maps a log level to the CSS color used when rendering its message.
function getLogColor(logLevel: string) {
  switch (logLevel) {
    case "debug": {
      return "grey";
    }
    case "error": {
      return "red";
    }
    case "info": {
      return "blue";
    }
    default: {
      return "initial";
    }
  }
}

================================================
FILE: src/subcommands/logs_test.ts
================================================
import { parseArgsForLogSubcommand } from "./logs.ts";
import { assertEquals, assertNotEquals, assertThrows } from "@std/assert";
import { parseArgs } from "../args.ts";

Deno.test("parseArgsForLogSubcommand", async (t) => {
  const parseHelper = (args: string[]) => {
    // For this test, the subcommand name should not be included in `args`.
    assertNotEquals(args.at(0), "logs");
    try {
      return parseArgsForLogSubcommand(parseArgs(args));
    } catch (e) {
      // Since Deno v1.44.0, when `Deno.exitCode` was introduced, test cases
      // with non-zero exit code has been treated as failure, causing some tests
      // to fail unexpectedly (not sure if this behavior change is intended).
      // To avoid this, we set `Deno.exitCode` to 0 before giving control back
// https://github.com/denoland/deno/pull/23609 // deno-lint-ignore no-explicit-any if ((Deno as any).exitCode !== undefined) { // deno-lint-ignore no-explicit-any (Deno as any).exitCode = 0; } throw e; } }; await t.step("specify help", () => { const got = parseHelper(["--help"]); assertEquals(got, { help: true, prod: false, token: null, deployment: null, project: null, since: null, until: null, grep: [], levels: null, regions: null, limit: 100, }); }); await t.step("specify since and until", () => { const since = new Date(Date.now() - 3 * 60 * 60 * 1000); // 3 hours ago const until = new Date(Date.now() - 42 * 60 * 1000); // 42 minutes ago const got = parseHelper([ `--since=${since.toISOString()}`, `--until=${until.toISOString()}`, ]); assertEquals(got, { help: false, prod: false, token: null, deployment: null, project: null, since, until, grep: [], levels: null, regions: null, limit: 100, }); }); await t.step("specify invalid format in since", () => { assertThrows(() => parseHelper(["--since=INVALID"]), Error, "exit code: 1"); }); await t.step("specify invalid format in until", () => { assertThrows(() => parseHelper(["--until=INVALID"]), Error, "exit code: 1"); }); await t.step("complex args", () => { const until = new Date(Date.now() - 42 * 1000); // 42 seconds ago const got = parseHelper([ "--prod", "--token=abc", "--project=helloworld", `--until=${until.toISOString()}`, "--grep=こんにちは", "--levels=info,error", "--regions=region1,region2", "--limit=42", "--grep=hola", ]); assertEquals(got, { help: false, prod: true, token: "abc", deployment: null, project: "helloworld", since: null, until, grep: ["こんにちは", "hola"], levels: ["info", "error"], regions: ["region1", "region2"], limit: 42, }); }); await t.step("specify project name in a positional argument", () => { const got = parseHelper([ "--prod", "--token=abc", "project_name", ]); assertEquals(got, { help: false, prod: true, token: "abc", deployment: null, project: "project_name", since: null, until: null, grep: 
[], levels: null, regions: null, limit: 100, }); }); }); ================================================ FILE: src/subcommands/projects.ts ================================================ import type { Args } from "../args.ts"; import { API, APIError, endpoint } from "../utils/api.ts"; import TokenProvisioner from "../utils/access_token.ts"; import { wait } from "../utils/spinner.ts"; import type { Organization, Project } from "../utils/api_types.ts"; import { bold, green, magenta, red } from "@std/fmt/colors"; import { error } from "../error.ts"; import organization from "../utils/organization.ts"; import { renderCron } from "../utils/crons.ts"; import { stringify as stringifyError } from "../error.ts"; const help = `Manage projects in Deno Deploy USAGE: deployctl projects [OPTIONS] SUBCOMMANDS: list List the name of all the projects accessible by the user show [NAME] View details of a project. Specify the project using the positional argument or the --project option; otherwise, it will show the details of the project specified in the config file or try to guess it from the working context delete [NAME] Delete a project. Specify the project in the same way as the show subcommand create [NAME] Create a new project. Specify the project name in the same way as the show subcommand rename [OLD] Change the name of the project. Specify the project in the same way as the show subcommand OPTIONS: -h, --help Prints this help information -p, --project= The project selected. Can also be provided as positional argument --org= Specify an organization. When creating a project, defaults to the user's personal organization. When listing projects, use "personal" to filter by the personal organization. --token= The API token to use (defaults to DENO_DEPLOY_TOKEN env var) --config= Path to the file from where to load DeployCTL config. Defaults to 'deno.json' --color= Enable or disable colored output. 
Defaults to 'auto' (colored when stdout is a tty) --force Automatically execute the command without waiting for confirmation. `; export default async function (args: Args): Promise { if (args.help) { console.log(help); Deno.exit(0); } const subcommand = args._.shift(); switch (subcommand) { case "list": await listProjects(args); break; case "show": await showProject(args); break; case "delete": await deleteProject(args); break; case "create": await createProject(args); break; case "rename": await renameProject(args); break; default: console.error(help); Deno.exit(1); } } async function listProjects(args: Args): Promise { const spinner = wait("Fetching organizations and projects...").start(); const api = args.token ? API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); const orgs = (await api.listOrganizations()).filter((org) => args.org ? (org.name ? org.name === args.org : args.org.toLowerCase() === "personal") : true ); const data: [Organization, Project[]][] = await Promise.all( orgs.map(async (org) => [org, await api.listProjects(org.id)]), ); spinner.succeed("Organizations and projects data ready:"); data.sort(([_orga, projectsa], [_orgb, projectsb]) => projectsb.length - projectsa.length ); for (const [org, projects] of data) { if (projects.length === 0) continue; console.log(); console.log( org.name && `'${bold(magenta(org.name))}' org:` || "Personal org:", ); for (const project of projects) { console.log(` ${green(project.name)}`); } } } async function showProject(args: Args): Promise { const positionalArg = args._.shift(); if (positionalArg) { // Positional arguments supersedes --project flag args.project = positionalArg.toString(); } if (!args.project) { error( "No project specified. Use --project to specify the project of which to show the details", ); } const spinner = wait(`Fetching project '${args.project}'...`).start(); const api = args.token ? 
API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); const [project, domains, buildsPage, databases, crons] = await Promise.all([ api.getProject(args.project), api.getDomains(args.project), api.listDeployments(args.project), api.getProjectDatabases(args.project), api.getProjectCrons(args.project), ]).catch((err) => { if (err instanceof APIError && err.code === "projectNotFound") { return [null, null, null, null, null]; } throw err; }); if (!project || !domains || !buildsPage || !databases) { spinner.fail( `The project '${args.project}' does not exist, or you don't have access to it`, ); return Deno.exit(1); } const organizationName = project.organization.name ? magenta(project.organization.name) : `${ magenta( (await api.getOrganizationById(project.organization.id)).members[0] .user .name, ) } [personal]`; spinner.succeed(`Project '${args.project}' found`); console.log(); console.log(bold(project.name)); console.log(new Array(project.name.length).fill("-").join("")); console.log(`Organization:\t${organizationName} (${project.organizationId})`); const ingressRoot = new URL(endpoint()).hostname.split(".").at(-2); domains.push({ domain: `${project.name}.${ingressRoot}.dev`, isValidated: true, }); const validatedDomains = domains.filter((domain) => domain.isValidated); console.log( `Domain(s):\t${ validatedDomains.map((domain) => `https://${domain.domain}`).join( "\n\t\t", ) }`, ); console.log(`Dash URL:\t${endpoint()}/projects/${project.id}`); if (project.type === "playground") { console.log(`Playground:\t${endpoint()}/playground/${project.name}`); } if (project.git) { console.log( `Repository:\thttps://github.com/${project.git.repository.owner}/${project.git.repository.name}`, ); } if (databases.length > 0) { console.log( `Databases:\t${ databases.map((db) => `[${db.branch}] ${db.databaseId}`).join(`\n\t\t`) }`, ); } if (crons && crons.length > 0) { console.log( `Crons:\t\t${crons.map(renderCron).join("\n\t\t")}`, ); } if (buildsPage.list.length > 
0) { console.log( `Deployments:${ buildsPage.list.map((build, i) => `${i !== 0 && i % 5 === 0 ? "\n\t\t" : "\t"}${ build.deployment ? project.productionDeployment?.deployment?.id === build.deployment.id ? `${magenta(build.deployment.id)}*` : build.deployment.id : `${red("✖")} (failed)` }` ).join("") }`, ); } } async function deleteProject(args: Args): Promise { const positionalArg = args._.shift(); if (positionalArg) { // Positional arguments supersedes --project flag args.project = positionalArg.toString(); } if (!args.project) { error( "No project specified. Use --project to specify the project to delete", ); } const fetchSpinner = wait(`Fetching project '${args.project}' details...`) .start(); const api = args.token ? API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); const project = await api.getProject(args.project); if (!project) { fetchSpinner.fail( `Project '${args.project}' not found, or you don't have access to it`, ); return Deno.exit(1); } fetchSpinner.succeed(`Project '${project.name}' (${project.id}) found`); const confirmation = args.force ? true : confirm( `${ magenta("?") } Are you sure you want to delete the project '${project.name}'?`, ); if (!confirmation) { wait("").fail("Delete canceled"); return; } const spinner = wait(`Deleting project '${args.project}'...`).start(); const deleted = await api.deleteProject(args.project); if (deleted) { spinner.succeed(`Project '${args.project}' deleted successfully`); } else { spinner.fail( `Project '${args.project}' not found, or you don't have access to it`, ); } } async function createProject(args: Args): Promise { const positionalArg = args._.shift(); if (positionalArg) { // Positional arguments supersedes --project flag args.project = positionalArg.toString(); } if (!args.project) { error( "No project specified. Use --project to specify the project to create", ); } const spinner = wait(`Creating project '${args.project}'...`).start(); const api = args.token ? 
API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); const org = args.org ? await organization.getByNameOrCreate(api, args.org) : null; try { await api.createProject(args.project, org?.id); spinner.succeed( `Project '${args.project}' created successfully ${ org ? `in organization '${org.name}'` : "" }`, ); } catch (error) { spinner.fail( `Cannot create the project '${args.project}': ${ stringifyError(error, { verbose: true }) }`, ); } } async function renameProject(args: Args): Promise { let currentId = args._.shift()?.toString(); let newName: string | null | undefined = args._.shift()?.toString(); if (currentId && !newName) { // Only required positional argument is the new name newName = currentId; currentId = undefined; } if (currentId) { // Positional arguments supersedes --project flag args.project = currentId; } if (!args.project) { error( "no project specified. Use --project to specify the project to rename", ); } const fetchSpinner = wait(`Fetching project '${args.project}' details...`) .start(); const api = args.token ? 
API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); const project = await api.getProject(args.project); if (!project) { fetchSpinner.fail( `Project ${args.project} not found, or you don't have access to it`, ); return Deno.exit(1); } const currentName = project.name; fetchSpinner.succeed(`Project '${currentName}' (${project.id}) found`); if (!newName) { newName = prompt(`${magenta("?")} New name for project '${currentName}':`); } if (!newName) { error("project name cannot be empty"); } const spinner = wait(`Renaming project '${currentName}' to '${newName}'...`) .start(); try { await api.renameProject(args.project, newName); spinner.succeed(`Project '${currentName}' renamed to '${newName}'`); } catch (error) { spinner.fail( `Cannot rename the project '${currentName}' to '${newName}': ${ stringifyError(error, { verbose: true }) }`, ); } } ================================================ FILE: src/subcommands/top.ts ================================================ // Copyright 2021 Deno Land Inc. All rights reserved. MIT license. import type { Args } from "../args.ts"; import { API } from "../utils/api.ts"; import TokenProvisioner from "../utils/access_token.ts"; import { wait } from "../utils/spinner.ts"; import * as tty from "@denosaurs/tty"; import { delay } from "@std/async/delay"; import { encodeHex } from "@std/encoding/hex"; import { error } from "../error.ts"; import type { ProjectStats } from "../utils/api_types.ts"; import { sha256 } from "../utils/hashing_encoding.ts"; import { stringify as stringifyError } from "../error.ts"; const help = ` Project monitoring (ALPHA) Definition of the table columns: idx Instance discriminator. Opaque id to discriminate different executions running in the same region. Deployment The id of the deployment running in the executing instance. Req/min Requests per minute received by the project. CPU% Percentage of CPU used by the project. CPU/req CPU time per request, in milliseconds. 
RSS/5min Max RSS used by the project during the last 5 minutes, in MB. Ingress/min Data received by the project per minute, in KB. Egress/min Data outputed by the project per minute, in KB. KVr/min KV reads performed by the project per minute. KVw/min KV writes performed by the project per minute. QSenq/min Queues enqueues performed by the project per minute. QSdeq/min Queues dequeues performed by the project per minute. USAGE: deployctl top [OPTIONS] OPTIONS: -h, --help Prints this help information -p, --project= The project to monitor. --token= The API token to use (defaults to DENO_DEPLOY_TOKEN env var) --config= Path to the file from where to load DeployCTL config. Defaults to 'deno.json' --color= Enable colored output. Defaults to 'auto' (colored when stdout is a tty) --format= Output the project stats in a table or JSON-encoded. Defaults to 'table' when stdout is a tty, and 'json' otherwise. --region= Show stats from only specific regions. Can be used multiple times (--region=us-east4 --region=us-west2). Can also be a substring (--region=us) `; export default async function topSubcommand(args: Args) { if (args.help) { console.log(help); Deno.exit(0); } if (!args.project) { error( "No project specified. Use --project to specify the project of which to stream the stats", ); } let format: "table" | "json"; switch (args.format) { case "table": case "json": format = args.format; break; case undefined: format = Deno.stdout.isTerminal() ? "table" : "json"; break; default: error( `Invalid format '${args.format}'. Supported values for the --format option are 'table' or 'json'`, ); } const spinner = wait( `Connecting to the stats stream of project '${args.project}'...`, ).start(); const api = args.token ? 
API.fromToken(args.token) : API.withTokenProvisioner(TokenProvisioner); let stats; try { stats = await api.streamMetering(args.project!); } catch (err) { spinner.fail( `Failed to connect to the stats stream of project '${args.project}': ${ stringifyError(err, { verbose: true }) }`, ); return Deno.exit(1); } spinner.succeed( `Connected to the stats stream of project '${args.project}'`, ); if (args.region.length !== 0) { const allStats = stats; const filter = args.region.flatMap((r) => r.split(",")).map((r) => r.trim() ); stats = async function* () { for await (const line of allStats) { for (const region of filter) { if (line.region.includes(region)) { yield line; break; } } } }(); } switch (format) { case "table": return await tabbed(stats); case "json": return await json(stats); } } async function tabbed(stats: AsyncGenerator) { const table: { [id: string]: { region: string; [other: string]: unknown } } = {}; const timeouts: { [id: string]: number } = {}; const toDelete: string[] = []; const spinner = wait("Streaming...").start(); let previousLength = 0; const renderStream = async function* () { // First render after 1 sec in case there's already data await delay(1_000); yield true; while (true) { await delay(5_000); yield true; } }(); try { let next = stats.next(); let render = renderStream.next(); while (true) { const result = await Promise.race([next, render]); const stat = result.value; if (stat === undefined) { // Only stats stream can end, returning undefined spinner.succeed("Stream ended"); return; } if (typeof stat === "object") { next = stats.next(); const id = encodeHex( await sha256(stat.id + stat.region + stat.deploymentId), ) .slice(0, 6); table[id] = { "deployment": stat.deploymentId, "region": stat.region, "Req/min": Math.ceil(stat.requestsPerMinute), "CPU%": parseFloat((stat.cpuTimePerSecond / 10).toFixed(2)), "CPU/req": parseFloat((stat.cpuTimePerRequest || 0).toFixed(2)), "RSS/5min": parseFloat( (stat.maxRss5Minutes / 1_000_000).toFixed(3), ), 
"Ingress/min": parseFloat( (stat.ingressBytesPerMinute / 1_000).toFixed(3), ), "Egress/min": parseFloat( (stat.egressBytesPerMinute / 1_000).toFixed(3), ), "KVr/min": Math.ceil(stat.kvReadUnitsPerMinute), "KVw/min": Math.ceil(stat.kvWriteUnitsPerMinute), "QSenq/min": Math.ceil(stat.enqueuePerMinute), "QSdeq/min": Math.ceil(stat.dequeuePerMinute), }; clearTimeout(timeouts[id]); timeouts[id] = setTimeout( (idToDelete: string) => { toDelete.push(idToDelete); }, 30_000, id, ); } else { render = renderStream.next(); while (toDelete.length > 0) { const idToDelete = toDelete.pop(); if (idToDelete) { delete table[idToDelete]; } } const linesToClear = previousLength ? previousLength + 5 : 1; previousLength = Object.keys(table).length; tty.goUpSync(linesToClear, Deno.stdout); tty.clearDownSync(Deno.stdout); const entries = Object.entries(table); // Kinda sort the table entries.sort(([_aid, a], [_bid, b]) => a.region.localeCompare(b.region) ); if (Object.keys(table).length > 0) { console.table(Object.fromEntries(entries)); } console.log(); } } } catch (error) { spinner.fail(`Stream disconnected: ${error}`); Deno.exit(1); } } async function json(stats: AsyncGenerator) { for await (const stat of stats) { console.log(JSON.stringify(stat)); } } ================================================ FILE: src/subcommands/upgrade.ts ================================================ // Copyright 2021 Deno Land Inc. All rights reserved. MIT license. import { error } from "../error.ts"; import { canParse as semverValid, greaterOrEqual as semverGreaterThanOrEquals, parse as semverParse, } from "@std/semver"; import { VERSION } from "../version.ts"; const help = `deployctl upgrade Upgrade deployctl to the given version (defaults to latest). 
To upgrade to latest version: deployctl upgrade To upgrade to specific version: deployctl upgrade 1.2.3 The version is downloaded from https://deno.land/x/deploy/deployctl.ts USAGE: deployctl upgrade [OPTIONS] [] OPTIONS: -h, --help Prints help information ARGS: The version to upgrade to (defaults to latest) `; export interface Args { help: boolean; } // deno-lint-ignore no-explicit-any export default async function (rawArgs: Record): Promise { const args: Args = { help: !!rawArgs.help, }; const version = typeof rawArgs._[0] === "string" ? rawArgs._[0] : null; if (args.help) { console.log(help); Deno.exit(); } if (rawArgs._.length > 1) { console.error(help); error("Too many positional arguments given."); } if (version && !semverValid(version)) { error(`The provided version is invalid.`); } const { latest, versions } = await getVersions().catch((err: TypeError) => { error(err.message); }); if (version && !versions.includes(version)) { error( "The provided version is not found.\n\nVisit https://github.com/denoland/deployctl/releases/ for available releases.", ); } if ( !version && semverGreaterThanOrEquals(semverParse(VERSION), semverParse(latest)) ) { console.log("You're using the latest version."); Deno.exit(); } else { const process = new Deno.Command(Deno.execPath(), { args: [ "install", "--allow-read", "--allow-write", "--allow-env", "--allow-net", "--allow-run", "--allow-sys", "--no-check", "--force", "--quiet", `https://deno.land/x/deploy@${version ? 
version : latest}/deployctl.ts`, ], }).spawn(); await process.status; } } export async function getVersions(): Promise< { latest: string; versions: string[] } > { const aborter = new AbortController(); const timer = setTimeout(() => aborter.abort(), 2500); const response = await fetch( "https://cdn.deno.land/deploy/meta/versions.json", { signal: aborter.signal }, ); if (!response.ok) { throw new Error( "couldn't fetch the latest version - try again after sometime", ); } const data = await response.json(); clearTimeout(timer); return data; } ================================================ FILE: src/utils/access_token.ts ================================================ import { interruptSpinner, wait } from "./spinner.ts"; import { error } from "../error.ts"; import { endpoint, USER_AGENT } from "./api.ts"; import tokenStorage from "./token_storage.ts"; import { base64url, sha256 } from "./hashing_encoding.ts"; import { stringify as stringifyError } from "../error.ts"; export default { get: tokenStorage.get, async provision() { // Synchronize provision routine // to prevent multiple authorization flows from triggering concurrently this.provisionPromise ??= provision(); const token = await this.provisionPromise; this.provisionPromise = null; return token; }, provisionPromise: null as Promise | null, revoke: tokenStorage.remove, }; async function provision(): Promise { const spinnerInterrupted = interruptSpinner(); wait("").start().info("Provisioning a new access token..."); const randomBytes = crypto.getRandomValues(new Uint8Array(32)); const claimVerifier = base64url(randomBytes); const claimChallenge = base64url(await sha256(claimVerifier)); const tokenStream = await fetch( `${endpoint()}/api/signin/cli/access_token`, { method: "POST", headers: { "User-Agent": USER_AGENT }, body: claimVerifier, }, ); if (!tokenStream.ok) { error( `when requesting an access token: ${await tokenStream.statusText}`, ); } const url = 
`${endpoint()}/signin/cli?claim_challenge=${claimChallenge}`; wait("").start().info(`Authorization URL: ${url}`); let openCmd; const args = []; // TODO(arnauorriols): use npm:open or deno.land/x/open when either is compatible switch (Deno.build.os) { case "darwin": { openCmd = "open"; break; } case "linux": { openCmd = "xdg-open"; break; } case "windows": { // Windows Start-Process is a cmdlet of PowerShell openCmd = "PowerShell.exe"; args.push("Start-Process"); break; } } args.push(url); let open; if (openCmd !== undefined) { try { open = new Deno.Command(openCmd, { args, stderr: "piped", stdout: "piped", }) .spawn(); } catch (error) { wait("").start().warn( "Unexpected error while trying to open the authorization URL in your default browser. Please report it at https://github.com/denoland/deployctl/issues/new.", ); wait({ text: "", indent: 3 }).start().fail(stringifyError(error)); } } if (open == undefined) { const warn = "Cannot open the authorization URL automatically. Please navigate to it manually using your usual browser"; wait("").start().info(warn); } else if (!(await open.status).success) { const warn = "Failed to open the authorization URL in your default browser. 
Please navigate to it manually"; wait("").start().warn(warn); let error = new TextDecoder().decode((await open.output()).stderr); const errIndent = 2; const elipsis = "..."; const maxErrLength = warn.length - errIndent; if (error.length > maxErrLength) { error = error.slice(0, maxErrLength - elipsis.length) + elipsis; } // resulting indentation is 1 less than configured wait({ text: "", indent: errIndent + 1 }).start().fail(error); } const spinner = wait("Waiting for authorization...").start(); const tokenOrError = await tokenStream.json(); if (tokenOrError.error) { error(`could not provision the access token: ${tokenOrError.error}`); } await tokenStorage.store(tokenOrError.token); spinner.succeed("Token obtained successfully"); spinnerInterrupted.resume(); return tokenOrError.token; } ================================================ FILE: src/utils/api.ts ================================================ import { delay } from "@std/async/delay"; import { TextLineStream } from "@std/streams/text_line_stream"; import { VERSION } from "../version.ts"; import type { Build, BuildsPage, Cron, Database, DeploymentProgress, DeploymentV1, Domain, GitHubActionsDeploymentRequest, LiveLog, LogQueryRequestParams, ManifestEntry, Metadata, Organization, PagingInfo, PersistedLog, Project, ProjectStats, PushDeploymentRequest, } from "./api_types.ts"; import { interruptSpinner, wait } from "./spinner.ts"; export const USER_AGENT = `DeployCTL/${VERSION} (${Deno.build.os} ${Deno.osRelease()}; ${Deno.build.arch})`; export interface RequestOptions { method?: string; body?: unknown; accept?: string; } export class APIError extends Error { code: string; xDenoRay: string | null; override name = "APIError"; constructor(code: string, message: string, xDenoRay: string | null) { super(message); this.code = code; this.xDenoRay = xDenoRay; } override toString() { let error = `${this.name}: ${this.message}`; if (this.xDenoRay !== null) { error += `\nx-deno-ray: ${this.xDenoRay}`; error += "\nIf 
you encounter this error frequently," + " contact us at deploy@deno.com with the above x-deno-ray."; } return error; } } export function endpoint(): string { return Deno.env.get("DEPLOY_API_ENDPOINT") ?? "https://dash.deno.com"; } interface TokenProvisioner { /** * Get the access token from a secure local storage or any other cache form. * If there isn't any token cached, returns `null`. */ get(): Promise; /** * Provision a new access token for DeployCTL */ provision(): Promise; /** * Delete the token from cache, forcing a new provision in the next request */ revoke(): Promise; } interface Logger { debug: (message: string) => void; info: (message: string) => void; notice: (message: string) => void; warning: (message: string) => void; error: (message: string) => void; } interface APIConfig { /** * When enabled, x-deno-ray in responses will always be printed even if the * request is successful. */ alwaysPrintXDenoRay: boolean; /** * Logger interface to use for logging certain events */ logger: Logger; } export class API { #endpoint: string; #authorization: string | TokenProvisioner; #config: APIConfig; constructor( authorization: string | TokenProvisioner, endpoint: string, config?: Partial, ) { this.#authorization = authorization; this.#endpoint = endpoint; const DEFAULT_CONFIG: APIConfig = { alwaysPrintXDenoRay: false, logger: { debug: (m) => console.debug(m), info: (m) => console.info(m), notice: (m) => console.log(m), warning: (m) => console.warn(m), error: (m) => console.error(m), }, }; this.#config = DEFAULT_CONFIG; this.#config.alwaysPrintXDenoRay = config?.alwaysPrintXDenoRay ?? DEFAULT_CONFIG.alwaysPrintXDenoRay; this.#config.logger = config?.logger ?? 
DEFAULT_CONFIG.logger; } static fromToken(token: string) { return new API(`Bearer ${token}`, endpoint()); } static withTokenProvisioner(provisioner: TokenProvisioner) { return new API(provisioner, endpoint()); } async request(path: string, opts: RequestOptions = {}): Promise { const url = `${this.#endpoint}/api${path}`; const method = opts.method ?? "GET"; const body = typeof opts.body === "string" || opts.body instanceof FormData ? opts.body : JSON.stringify(opts.body); const authorization = typeof this.#authorization === "string" ? this.#authorization : `Bearer ${ await this.#authorization.get() ?? await this.#authorization.provision() }`; const sudo = Deno.env.get("SUDO"); const headers = { "User-Agent": USER_AGENT, "Accept": opts.accept ?? "application/json", "Authorization": authorization, ...(opts.body !== undefined ? opts.body instanceof FormData ? {} : { "Content-Type": "application/json" } : {}), ...(sudo ? { ["x-deploy-sudo"]: sudo } : {}), }; let res = await fetch(url, { method, headers, body }); if (this.#config.alwaysPrintXDenoRay) { this.#config.logger.notice( `x-deno-ray: ${res.headers.get("x-deno-ray")}`, ); } if (res.status === 401 && typeof this.#authorization === "object") { // Token expired or revoked. 
Provision again and retry headers.Authorization = `Bearer ${await this.#authorization.provision()}`; res = await fetch(url, { method, headers, body }); } return res; } async #requestJson(path: string, opts?: RequestOptions): Promise { const res = await this.request(path, opts); if (res.headers.get("Content-Type") !== "application/json") { const text = await res.text(); throw new Error(`Expected JSON, got '${text}'`); } const json = await res.json(); if (res.status !== 200) { const xDenoRay = res.headers.get("x-deno-ray"); throw new APIError(json.code, json.message, xDenoRay); } return json; } async #requestStream( path: string, opts?: RequestOptions, ): Promise> { const res = await this.request(path, opts); if (res.status !== 200) { const json = await res.json(); const xDenoRay = res.headers.get("x-deno-ray"); throw new APIError(json.code, json.message, xDenoRay); } if (res.body === null) { throw new Error("Stream ended unexpectedly"); } const lines: ReadableStream = res.body .pipeThrough(new TextDecoderStream()) .pipeThrough(new TextLineStream()); return async function* (): AsyncGenerator { for await (const line of lines) { if (line === "") return; yield line; } }(); } async #requestJsonStream( path: string, opts?: RequestOptions, ): Promise> { const stream = await this.#requestStream(path, opts); return async function* () { for await (const line of stream) { yield JSON.parse(line); } }(); } async getOrganizationByName(name: string): Promise { const organizations: Organization[] = await this.#requestJson( `/organizations`, ); for (const org of organizations) { if (org.name === name) { return org; } } } async getOrganizationById(id: string): Promise { return await this.#requestJson(`/organizations/${id}`); } async createOrganization(name: string): Promise { const body = { name }; return await this.#requestJson( `/organizations`, { method: "POST", body }, ); } async listOrganizations(): Promise { return await this.#requestJson(`/organizations`); } async 
getProject(id: string): Promise { try { return await this.#requestJson(`/projects/${id}`); } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return null; } throw err; } } async createProject( name?: string, organizationId?: string, envs?: Record, ): Promise { const body = { name, organizationId, envs }; return await this.#requestJson(`/projects/`, { method: "POST", body }); } async renameProject( id: string, newName: string, ): Promise { const body = { name: newName }; await this.#requestJson(`/projects/${id}`, { method: "PATCH", body }); } async deleteProject( id: string, ): Promise { try { await this.#requestJson(`/projects/${id}`, { method: "DELETE" }); return true; } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return false; } throw err; } } async listProjects( orgId: string, ): Promise { const org: { projects: Project[] } = await this.#requestJson( `/organizations/${orgId}`, ); return org.projects; } async getDomains(projectId: string): Promise { return await this.#requestJson(`/projects/${projectId}/domains`); } async listDeployments( projectId: string, page?: number, limit?: number, ): Promise { const query = new URLSearchParams(); if (page !== undefined) { query.set("page", page.toString()); } if (limit !== undefined) { query.set("limit", limit.toString()); } try { const [list, paging]: [Build[], PagingInfo] = await this.#requestJson( `/projects/${projectId}/deployments?${query}`, ); return { list, paging }; } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return null; } throw err; } } async *listAllDeployments( projectId: string, ): AsyncGenerator { let totalPages = 1; let page = 0; while (totalPages > page) { const [deployments, paging]: [Build[], PagingInfo] = await this .#requestJson( `/projects/${projectId}/deployments/?limit=50&page=${page}`, ); for (const deployment of deployments) { yield deployment; } totalPages = paging.totalPages; page = paging.page + 
1; } } async getDeployment( deploymentId: string, ): Promise { try { return await this.#requestJson(`/deployments/${deploymentId}`); } catch (err) { if (err instanceof APIError && err.code === "deploymentNotFound") { return null; } throw err; } } async deleteDeployment( deploymentId: string, ): Promise { try { await this.#requestJson(`/v1/deployments/${deploymentId}`, { method: "DELETE", }); return true; } catch (err) { if (err instanceof APIError && err.code === "deploymentNotFound") { return false; } throw err; } } async redeployDeployment( deploymentId: string, redeployParams: { prod?: boolean; env_vars?: Record; databases?: { default: string }; }, ): Promise { try { return await this.#requestJson( `/v1/deployments/${deploymentId}/redeploy?internal=true`, { method: "POST", body: redeployParams, }, ); } catch (err) { if (err instanceof APIError && err.code === "deploymentNotFound") { return null; } throw err; } } getLogs( projectId: string, deploymentId: string, ): Promise> { return this.#requestJsonStream( `/projects/${projectId}/deployments/${deploymentId}/logs/`, { accept: "application/x-ndjson", }, ); } async queryLogs( projectId: string, deploymentId: string, params: LogQueryRequestParams, ): Promise<{ logs: PersistedLog[] }> { const searchParams = new URLSearchParams({ params: JSON.stringify(params), }); return await this.#requestJson( `/projects/${projectId}/deployments/${deploymentId}/query_logs?${searchParams.toString()}`, ); } async projectNegotiateAssets( id: string, manifest: { entries: Record }, ): Promise { return await this.#requestJson(`/projects/${id}/assets/negotiate`, { method: "POST", body: manifest, }); } pushDeploy( projectId: string, request: PushDeploymentRequest, files: Uint8Array[], ): Promise> { const form = new FormData(); form.append("request", JSON.stringify(request)); for (const bytes of files) { form.append("file", new Blob([bytes])); } return this.#requestJsonStream( `/projects/${projectId}/deployment_with_assets`, { method: 
"POST", body: form }, ); } gitHubActionsDeploy( projectId: string, request: GitHubActionsDeploymentRequest, files: Uint8Array[], ): Promise> { const form = new FormData(); form.append("request", JSON.stringify(request)); for (const bytes of files) { form.append("file", new Blob([bytes])); } return this.#requestJsonStream( `/projects/${projectId}/deployment_github_actions`, { method: "POST", body: form }, ); } getMetadata(): Promise { return this.#requestJson("/meta"); } async streamMetering( project: string, ): Promise> { const streamGen = () => this.#requestStream(`/projects/${project}/stats`); let stream = await streamGen(); return async function* () { for (;;) { try { for await (const line of stream) { try { yield JSON.parse(line); } catch { // Stopgap while the streaming errors are fixed } } } catch (error) { // Stopgap while the streaming errors are fixed const interrupt = interruptSpinner(); const spinner = wait(`Error: ${error}. Reconnecting...`).start(); await delay(5_000); stream = await streamGen(); spinner.stop(); interrupt.resume(); } } }(); } async getProjectDatabases(project: string): Promise { try { return await this.#requestJson(`/projects/${project}/databases`); } catch (err) { if (err instanceof APIError && err.code === "projectNotFound") { return null; } throw err; } } async getDeploymentCrons( projectId: string, deploymentId: string, ): Promise { return await this.#requestJson( `/projects/${projectId}/deployments/${deploymentId}/crons`, ); } async getProjectCrons( projectId: string, ): Promise { try { return await this.#requestJson( `/projects/${projectId}/deployments/latest/crons`, ); } catch (err) { // When the project does not have a production deployment, API returns deploymentNotFound if (err instanceof APIError && err.code === "deploymentNotFound") { return null; } throw err; } } } ================================================ FILE: src/utils/api_types.ts ================================================ export interface DomainMapping { 
domain: string;
  createdAt: string;
  updatedAt: string;
}

/** One build of a project, optionally linked to the git commit that triggered it. */
export interface Build {
  id: string;
  relatedCommit?: {
    hash: string;
    branch?: string;
    message: string;
    authorName: string;
    authorEmail: string;
    authorGithubUsername: string;
    url: string;
  };
  deployment: Deployment | null;
  deploymentId: string;
  project: Project;
  createdAt: string;
  logs: DeploymentProgress[];
}

export interface Deployment {
  id: string;
  description: string;
  url: string;
  domainMappings: DomainMapping[];
  project?: Project;
  projectId: string;
  createdAt: string;
  updatedAt: string;
  envVars: string[];
  // NOTE(review): value type of this Record stripped by extraction.
  kvDatabases: Record;
}

/** Deployment shape used by the /v1 API endpoints. */
export type DeploymentV1 = {
  id: string;
  projectId: string;
  description?: string;
  status: "failed" | "pending" | "success";
  domains: string[];
  databases: Record;
  createdAt: string;
  updatedAt: string;
};

/** One page of builds plus its paging metadata. */
export interface BuildsPage {
  list: Build[];
  paging: PagingInfo;
}

export interface Project {
  id: string;
  name: string;
  type: "git" | "playground";
  git?: {
    repository: { owner: string; name: string };
    productionBranch: string;
  };
  productionDeployment?: Build | null;
  hasProductionDeployment: boolean;
  organizationId: string;
  organization: Organization;
  createdAt: string;
  updatedAt: string;
  envVars: string[];
}

/** Personal ("user") organizations have a null name; normal orgs are named. */
export type Organization = UserOrganization | NormalOrganization;

export type UserOrganization = CommonOrganization & {
  name: null;
};

export type NormalOrganization = CommonOrganization & {
  name: string;
};

export interface CommonOrganization {
  id: string;
  members: OrganizationMember[];
}

export interface OrganizationMember {
  user: User;
}

export interface User {
  name: string;
}

export interface PagingInfo {
  page: number;
  count: number;
  limit: number;
  totalCount: number;
  totalPages: number;
}

/** Manifest entry for a regular file, identified by its git blob SHA-1. */
export interface ManifestEntryFile {
  kind: "file";
  gitSha1: string;
  size: number;
}

export interface ManifestEntryDirectory {
  kind: "directory";
  entries: Record;
}

export interface ManifestEntrySymlink {
  kind: "symlink";
  target: string;
}

/** Discriminated union over the `kind` tag. */
export type ManifestEntry =
  | ManifestEntryFile
  | ManifestEntryDirectory
  | ManifestEntrySymlink;

export interface PushDeploymentRequest {
  url: string;
  importMapUrl: string | null;
  production: boolean;
  manifest?: { entries: Record };
}

export interface GitHubActionsDeploymentRequest {
  url: string;
  importMapUrl: string | null;
  manifest: { entries: Record };
  event?: unknown;
}

/** Progress events for a deployment, discriminated by `type`. */
export type DeploymentProgress =
  | DeploymentProgressStaticFile
  | DeploymentProgressLoad
  | DeploymentProgressUploadComplete
  | DeploymentProgressSuccess
  | DeploymentProgressError;

export interface DeploymentProgressStaticFile {
  type: "staticFile";
  currentBytes: number;
  totalBytes: number;
}

export interface DeploymentProgressLoad {
  type: "load";
  url: string;
  seen: number;
  total: number;
}

export interface DeploymentProgressUploadComplete {
  type: "uploadComplete";
}

export interface DeploymentProgressSuccess extends Deployment {
  type: "success";
}

export interface DeploymentProgressError {
  type: "error";
  code: string;
  ctx: string;
}

export interface LiveLogReady {
  type: "ready";
}

export interface LiveLogPing {
  type: "ping";
}

export interface LiveLogMessage {
  type: "message";
  time: string;
  message: string;
  level: "debug" | "info" | "warning" | "error";
  region: string;
}

/** Live-log stream events, discriminated by `type`. */
export type LiveLog =
  | LiveLogReady
  | LiveLogPing
  | LiveLogMessage;

/** Filters for querying persisted logs. */
export interface LogQueryRequestParams {
  regions?: string[];
  levels?: string[];
  // RFC3339
  since?: string;
  // RFC3339
  until?: string;
  q?: string[];
  limit?: number;
}

export interface PersistedLog {
  deploymentId: string;
  isolateId: string;
  region: string;
  level: "debug" | "info" | "warning" | "error";
  // RFC3339
  timestamp: string;
  message: string;
}

export interface Metadata {
  regionCodes: string[];
}

export interface Domain {
  domain: string;
  isValidated: boolean;
}

/** A usage-statistics sample for a project. */
export interface ProjectStats {
  id: string;
  region: string;
  projectId: string;
  deploymentId: string;
  uptime: number;
  requestsPerMinute: number;
  cpuTimePerSecond: number;
  cpuTimePerRequest: number;
  maxRss5Minutes: number;
  ingressBytesPerMinute: number;
egressBytesPerMinute: number; kvReadUnitsPerMinute: number; kvWriteUnitsPerMinute: number; enqueuePerMinute: number; dequeuePerMinute: number; } export interface Database { branch: string; databaseId: string; bindingName: string; description: string; sizeBytes?: number; availableRegions: string[]; createdAt: string; updatedAt: string; } export interface Cron { cron_spec: { name: string; schedule: string; backoff_schedule?: number; }; status?: CronStatus; history: CronExecutionRetry[][]; } export interface CronExecutionRetry { status: "success" | "failure" | "executing"; start_ms: number; end_ms: number; error_message?: string; deployment_id: string; } type CronStatus = | { status: "unscheduled" } | { status: "scheduled"; deadline_ms: number } | { status: "executing"; retries: CronExecutionRetry[] }; ================================================ FILE: src/utils/crons.ts ================================================ import { green, red, stripAnsiCode } from "@std/fmt/colors"; import type { Cron, CronExecutionRetry } from "./api_types.ts"; import { renderTimeDelta } from "./time.ts"; export function renderCron(cron: Cron): string { return `${cron.cron_spec.name} [${cron.cron_spec.schedule}] ${ renderCronStatus(cron) }`; } function renderCronStatus(cron: Cron): string { if (!cron.status) { return "n/a"; } switch (cron.status.status) { case "unscheduled": return `${ cron.history.length > 0 ? `${renderLastCronExecution(cron.history[0][0])} ` : "" }(unscheduled)`; case "executing": if (cron.status.retries.length > 0) { return `${ renderLastCronExecution(cron.status.retries[0]) } (retrying...)`; } else { return "(executing...)"; } case "scheduled": return `${ cron.history.length > 0 ? 
`${renderLastCronExecution(cron.history[0][0])} ` : "" }(next at ${ new Date(cron.status.deadline_ms).toLocaleString(navigator.language, { timeZoneName: "short", }) })`; } } function renderLastCronExecution(execution: CronExecutionRetry): string { const start = new Date(execution.start_ms); const end = new Date(execution.end_ms); const duration = end.getTime() - start.getTime(); const status = execution.status === "success" ? green("succeeded") : execution.status === "failure" ? red("failed") : "executing"; return `${status} at ${ start.toLocaleString(navigator.language, { timeZoneName: "short" }) } after ${stripAnsiCode(renderTimeDelta(duration))}`; } ================================================ FILE: src/utils/entrypoint.ts ================================================ import { resolve, toFileUrl } from "@std/path"; import { stringify as stringifyError } from "../error.ts"; /** * Parses the entrypoint to a URL. * Ensures the file exists when the entrypoint is a local file. */ export async function parseEntrypoint( entrypoint: string, root?: string, diagnosticName = "entrypoint", ): Promise { let entrypointSpecifier: URL; try { if (isURL(entrypoint)) { entrypointSpecifier = new URL(entrypoint); } else { entrypointSpecifier = toFileUrl(resolve(root ?? 
Deno.cwd(), entrypoint)); } } catch (err) { throw `Failed to parse ${diagnosticName} specifier '${entrypoint}': ${ stringifyError(err) }`; } if (entrypointSpecifier.protocol === "file:") { try { await Deno.lstat(entrypointSpecifier); } catch (err) { throw `Failed to open ${diagnosticName} file at '${entrypointSpecifier}': ${ stringifyError(err) }`; } } return entrypointSpecifier; } export function isURL(entrypoint: string): boolean { return entrypoint.startsWith("https://") || entrypoint.startsWith("http://") || entrypoint.startsWith("file://") || entrypoint.startsWith("data:") || entrypoint.startsWith("jsr:") || entrypoint.startsWith("npm:"); } ================================================ FILE: src/utils/env_vars.ts ================================================ import * as dotenv from "@std/dotenv"; import type { Args } from "../args.ts"; /** * Obtain the env variables provided by the user with the --env and --env-file options. * * Both --env and --env-file options can be used multiple times. In case of conflict, the last * option takes precedence. Env vars set with --env always takes precedence over envs in env files. */ export async function envVarsFromArgs( args: Args, ): Promise | null> { const fileEnvs = (await Promise.all( args["env-file"].map((envFile) => dotenv.load({ ...envFile ? { envPath: envFile } : {} }) ), )).reduce((a, b) => Object.assign(a, b), {}); const standaloneEnvs = dotenv.parse(args.env.join("\n")); const envVars = { ...fileEnvs, ...standaloneEnvs, }; return Object.keys(envVars).length > 0 ? 
envVars : null; } ================================================ FILE: src/utils/hashing_encoding.ts ================================================ export function base64url(binary: Uint8Array): string { const binaryString = Array.from(binary).map((b) => String.fromCharCode(b)) .join(""); const output = btoa(binaryString); const urlSafeOutput = output .replaceAll("=", "") .replaceAll("+", "-") .replaceAll("/", "_"); return urlSafeOutput; } export async function sha256(randomString: string): Promise { return new Uint8Array( await crypto.subtle.digest( "SHA-256", new TextEncoder().encode(randomString), ), ); } ================================================ FILE: src/utils/info.ts ================================================ import { join } from "@std/path/join"; import { getVersions } from "../subcommands/upgrade.ts"; export function getConfigPaths() { const homeDir = Deno.build.os == "windows" ? Deno.env.get("USERPROFILE")! : Deno.env.get("HOME")!; const configDir = join(homeDir, ".deno", "deployctl"); return { configDir, updatePath: join(configDir, "update.json"), credentialsPath: join(configDir, "credentials.json"), }; } export async function fetchReleases() { try { const { latest } = await getVersions(); const updateInfo = { lastFetched: Date.now(), latest }; const { updatePath, configDir } = getConfigPaths(); await Deno.mkdir(configDir, { recursive: true }); await Deno.writeFile( updatePath, new TextEncoder().encode(JSON.stringify(updateInfo, null, 2)), ); } catch (_) { // We will try again later when the fetch isn't successful, // so we shouldn't report errors. } } ================================================ FILE: src/utils/manifest.ts ================================================ import { globToRegExp, isGlob, join, normalize } from "@std/path"; import type { ManifestEntry } from "./api_types.ts"; /** Calculate git object hash, like `git hash-object` does. 
*/
export async function calculateGitSha1(bytes: Uint8Array) {
  // Git hashes blobs as sha1("blob <len>\0" + content); build that buffer.
  const prefix = `blob ${bytes.byteLength}\0`;
  const prefixBytes = new TextEncoder().encode(prefix);
  const fullBytes = new Uint8Array(prefixBytes.byteLength + bytes.byteLength);
  fullBytes.set(prefixBytes);
  fullBytes.set(bytes, prefixBytes.byteLength);
  const hashBytes = await crypto.subtle.digest("SHA-1", fullBytes);
  // Hex-encode the digest.
  const hashHex = Array.from(new Uint8Array(hashBytes))
    .map((b) => b.toString(16).padStart(2, "0"))
    .join("");
  return hashHex;
}

// Decide whether a relative path survives the include/exclude filters:
// an empty include list admits everything; exclude always wins.
function include(
  path: string,
  include: RegExp[],
  exclude: RegExp[],
): boolean {
  if (
    include.length &&
    !include.some((pattern): boolean => pattern.test(normalize(path)))
  ) {
    return false;
  }
  if (
    exclude.length &&
    exclude.some((pattern): boolean => pattern.test(normalize(path)))
  ) {
    return false;
  }
  return true;
}

// Walk `dir` (relative paths are computed against `cwd`) producing both
// the manifest entry tree and a gitSha1 -> path lookup map.
// NOTE(review): generic params of Record/Map stripped by extraction.
export async function walk(
  cwd: string,
  dir: string,
  options: { include: RegExp[]; exclude: RegExp[] },
): Promise<
  {
    manifestEntries: Record;
    hashPathMap: Map;
  }
> {
  const hashPathMap = new Map();
  const manifestEntries = await walkInner(cwd, dir, hashPathMap, options);
  return {
    manifestEntries,
    hashPathMap,
  };
}

// Recursive helper for walk(): returns the manifest entries of one
// directory, filling hashPathMap as files are hashed.
async function walkInner(
  cwd: string,
  dir: string,
  hashPathMap: Map,
  options: { include: RegExp[]; exclude: RegExp[] },
): Promise> {
  const entries: Record = {};
  for await (const file of Deno.readDir(dir)) {
    const path = join(dir, file.name);
    const relative = path.slice(cwd.length);
    if (
      // Do not test directories, because --include=foo/bar must include the directory foo (same goes with --include=*/bar)
      !file.isDirectory &&
      !include(
        path.slice(cwd.length + 1),
        options.include,
        options.exclude,
      )
    ) {
      continue;
    }
    let entry: ManifestEntry;
    if (file.isFile) {
      const data = await Deno.readFile(path);
      const gitSha1 = await calculateGitSha1(data);
      entry = {
        kind: "file",
        gitSha1,
        size: data.byteLength,
      };
      hashPathMap.set(gitSha1, path);
    } else if (file.isDirectory) {
      // Never include the VCS directory at the walk root.
      if (relative === "/.git") continue;
      entry = {
        kind: "directory",
        entries: await
walkInner(cwd, path, hashPathMap, options),
      };
    } else if (file.isSymlink) {
      const target = await Deno.readLink(path);
      entry = {
        kind: "symlink",
        target,
      };
    } else {
      // A Deno.DirEntry is exactly one of file/directory/symlink.
      throw new Error(`Unreachable`);
    }
    entries[file.name] = entry;
  }
  return entries;
}

/**
 * Converts a file path pattern, which may be a glob, to a RegExp instance.
 *
 * @param pattern file path pattern which may be a glob
 * @returns a RegExp instance that is equivalent to the given pattern
 */
export function convertPatternToRegExp(pattern: string): RegExp {
  return isGlob(pattern)
    // slice is used to remove the end-of-string anchor '$'
    ? new RegExp(globToRegExp(normalize(pattern)).toString().slice(1, -2))
    : new RegExp(`^${normalize(pattern)}`);
}

/**
 * Determines if the manifest contains the entry at the given relative path.
 *
 * @param manifestEntries manifest entries to search
 * @param entryRelativePathToLookup a relative path to look up in the manifest
 * @returns `true` if the manifest contains the entry at the given relative path
 */
export function containsEntryInManifest(
  manifestEntries: Record,
  entryRelativePathToLookup: string,
): boolean {
  for (const [entryName, entry] of Object.entries(manifestEntries)) {
    switch (entry.kind) {
      case "file":
      case "symlink": {
        if (entryName === entryRelativePathToLookup) {
          return true;
        }
        break;
      }
      case "directory": {
        // Only descend when the looked-up path starts with this directory's
        // name; strip "<dir>/" and recurse into its entries.
        if (!entryRelativePathToLookup.startsWith(entryName)) {
          break;
        }
        const relativePath = entryRelativePathToLookup.slice(
          entryName.length + 1,
        );
        return containsEntryInManifest(entry.entries, relativePath);
      }
      default: {
        // Exhaustiveness check: compile error if a new entry kind is added.
        const _: never = entry;
      }
    }
  }
  return false;
}

================================================
FILE: src/utils/manifest_test.ts
================================================

import { dirname, fromFileUrl, join } from "@std/path";
import { assert, assertEquals, assertFalse } from "@std/assert";
import type { ManifestEntry } from "./api_types.ts";
import {
  containsEntryInManifest,
  convertPatternToRegExp,
  walk,
} from "./manifest.ts";
Deno.test({ name: "convertPatternToRegExp", ignore: Deno.build.os === "windows", fn: () => { assertEquals(convertPatternToRegExp("foo"), new RegExp("^foo")); assertEquals(convertPatternToRegExp(".././foo"), new RegExp("^../foo")); assertEquals(convertPatternToRegExp("*.ts"), new RegExp("^[^/]*\\.ts/*")); }, }); Deno.test({ name: "walk and containsEntryInManifest", fn: async (t) => { type Test = { name: string; input: { testdir: string; include: readonly string[]; exclude: readonly string[]; }; expected: { entries: Record; containedEntries: readonly string[]; notContainedEntries: readonly string[]; }; }; const tests: Test[] = [ { name: "single_file", input: { testdir: "single_file", include: [], exclude: [], }, expected: { entries: { "a.txt": { kind: "file", gitSha1: "78981922613b2afb6025042ff6bd878ac1994e85", size: 2, }, }, containedEntries: ["a.txt"], notContainedEntries: ["b.txt", ".git", "deno.json"], }, }, { name: "single_file with include", input: { testdir: "single_file", include: ["a.txt"], exclude: [], }, expected: { entries: { "a.txt": { kind: "file", gitSha1: "78981922613b2afb6025042ff6bd878ac1994e85", size: 2, }, }, containedEntries: ["a.txt"], notContainedEntries: ["b.txt", ".git", "deno.json"], }, }, { name: "single_file with include 2", input: { testdir: "single_file", include: ["*.txt"], exclude: [], }, expected: { entries: { "a.txt": { kind: "file", gitSha1: "78981922613b2afb6025042ff6bd878ac1994e85", size: 2, }, }, containedEntries: ["a.txt"], notContainedEntries: ["b.txt", ".git", "deno.json"], }, }, { name: "single_file with exclude", input: { testdir: "single_file", include: [], exclude: ["a.txt"], }, expected: { entries: {}, containedEntries: [], notContainedEntries: ["a.txt", "b.txt", ".git", "deno.json"], }, }, { name: "two_levels", input: { testdir: "two_levels", include: [], exclude: [], }, expected: { entries: { "a.txt": { kind: "file", gitSha1: "78981922613b2afb6025042ff6bd878ac1994e85", size: 2, }, "inner": { kind: "directory", entries: 
{ "b.txt": { kind: "file", gitSha1: "61780798228d17af2d34fce4cfbdf35556832472", size: 2, }, }, }, }, containedEntries: ["a.txt", "inner/b.txt"], notContainedEntries: [ "b.txt", "inner/a.txt", ".git", "deno.json", "inner", ], }, }, { name: "two_levels with include", input: { testdir: "two_levels", include: ["**/b.txt"], exclude: [], }, expected: { entries: { "inner": { kind: "directory", entries: { "b.txt": { kind: "file", gitSha1: "61780798228d17af2d34fce4cfbdf35556832472", size: 2, }, }, }, }, containedEntries: ["inner/b.txt"], notContainedEntries: [ "a.txt", "b.txt", "inner/a.txt", ".git", "deno.json", "inner", ], }, }, { name: "two_levels with exclude", input: { testdir: "two_levels", include: [], exclude: ["*.txt"], }, expected: { entries: { "inner": { kind: "directory", entries: { "b.txt": { kind: "file", gitSha1: "61780798228d17af2d34fce4cfbdf35556832472", size: 2, }, }, }, }, containedEntries: ["inner/b.txt"], notContainedEntries: [ "a.txt", "b.txt", "inner/a.txt", ".git", "deno.json", "inner", ], }, }, { name: "complex", input: { testdir: "complex", include: [], exclude: [], }, expected: { entries: { "a.txt": { kind: "file", gitSha1: "78981922613b2afb6025042ff6bd878ac1994e85", size: 2, }, "inner1": { kind: "directory", entries: { "b.txt": { kind: "file", gitSha1: "61780798228d17af2d34fce4cfbdf35556832472", size: 2, }, }, }, "inner2": { kind: "directory", entries: { "b.txt": { kind: "file", gitSha1: "61780798228d17af2d34fce4cfbdf35556832472", size: 2, }, }, }, }, containedEntries: ["a.txt", "inner1/b.txt", "inner2/b.txt"], notContainedEntries: [ "b.txt", "inner1/a.txt", "inner2/a.txt", ".git", "deno.json", "inner1", "inner2", ], }, }, ]; for (const test of tests) { await t.step({ name: test.name, fn: async () => { const { manifestEntries } = await walk( join( fromFileUrl(dirname(import.meta.url)), "manifest_testdata", test.input.testdir, ), join( fromFileUrl(dirname(import.meta.url)), "manifest_testdata", test.input.testdir, ), { include: 
test.input.include.map(convertPatternToRegExp), exclude: test.input.exclude.map(convertPatternToRegExp), }, ); assertEquals(manifestEntries, test.expected.entries); for (const entry of test.expected.containedEntries) { const contained = containsEntryInManifest(manifestEntries, entry); assert( contained, `Expected ${entry} to be contained in the manifest`, ); } for (const entry of test.expected.notContainedEntries) { const contained = containsEntryInManifest(manifestEntries, entry); assertFalse( contained, `Expected ${entry} to *not* be contained in the manifest`, ); } }, }); } }, }); ================================================ FILE: src/utils/manifest_testdata/complex/a.txt ================================================ a ================================================ FILE: src/utils/manifest_testdata/complex/inner1/b.txt ================================================ b ================================================ FILE: src/utils/manifest_testdata/complex/inner2/b.txt ================================================ b ================================================ FILE: src/utils/manifest_testdata/single_file/a.txt ================================================ a ================================================ FILE: src/utils/manifest_testdata/two_levels/a.txt ================================================ a ================================================ FILE: src/utils/manifest_testdata/two_levels/inner/b.txt ================================================ b ================================================ FILE: src/utils/mod.ts ================================================ // Export functions used by `action/index.js` export { parseEntrypoint } from "./entrypoint.ts"; export { API, APIError } from "./api.ts"; export { convertPatternToRegExp, walk } from "./manifest.ts"; export { fromFileUrl, resolve } from "@std/path"; ================================================ FILE: src/utils/organization.ts 
================================================ import { error } from "../error.ts"; import type { API } from "./api.ts"; import type { Organization } from "./api_types.ts"; import { interruptSpinner, wait } from "./spinner.ts"; export default { getByNameOrCreate: async ( api: API, name: string, ): Promise => { const interruptedSpinner = interruptSpinner(); let org; try { let spinner = wait( `You have specified the organization ${name}. Fetching details...`, ).start(); org = await api.getOrganizationByName(name); if (!org) { spinner.stop(); spinner = wait( `Organization '${name}' not found. Creating...`, ).start(); org = await api.createOrganization(name); spinner.succeed(`Created new organization '${org!.name}'.`); } else { spinner.stop(); } } catch (e) { error(e); } interruptedSpinner.resume(); return org; }, }; ================================================ FILE: src/utils/spinner.ts ================================================ import { type Spinner, type SpinnerOptions, wait as innerWait, } from "@denosaurs/wait"; let current: Spinner | null = null; export function wait(param: string | SpinnerOptions) { if (typeof param === "string") { param = { text: param }; } param.interceptConsole = false; current = innerWait({ stream: Deno.stderr, ...param }); return current; } export function interruptSpinner(): Interrupt { current?.stop(); const interrupt = new Interrupt(current); current = null; return interrupt; } export class Interrupt { #spinner: Spinner | null; constructor(spinner: Spinner | null) { this.#spinner = spinner; } resume() { current = this.#spinner; this.#spinner?.start(); } } ================================================ FILE: src/utils/time.ts ================================================ import { yellow } from "@std/fmt/colors"; export function renderTimeDelta(delta: number, language?: string): string { const sinces = [delta]; const sinceUnits = ["milli"]; if (sinces[0] >= 1000) { sinces.push(Math.floor(sinces[0] / 1000)); sinces[0] = 
sinces[0] % 1000; sinceUnits.push("second"); } if (sinces[1] >= 60) { sinces.push(Math.floor(sinces[1] / 60)); sinces[1] = sinces[1] % 60; sinceUnits.push("minute"); } if (sinces[2] >= 60) { sinces.push(Math.floor(sinces[2] / 60)); sinces[2] = sinces[2] % 60; sinceUnits.push("hour"); } if (sinces[3] >= 24) { sinces.push(Math.floor(sinces[3] / 24)); sinces[3] = sinces[3] % 24; sinceUnits.push("day"); } if (sinces.length > 1) { // remove millis if there are already seconds sinces.shift(); sinceUnits.shift(); } sinces.reverse(); sinceUnits.reverse(); let sinceStr = ""; for (let x = 0; x < sinces.length; x++) { const since = sinces[x]; let sinceUnit = sinceUnits[x]; if (since === 0) continue; if (sinceStr) { sinceStr += ", "; } if (sinces[x] > 1) { sinceUnit += "s"; } sinceStr += `${ since.toLocaleString(language ?? navigator.language) } ${sinceUnit}`; if (x === 0) { sinceStr = yellow(sinceStr); } } return sinceStr; } ================================================ FILE: src/utils/time_test.ts ================================================ import { yellow } from "@std/fmt/colors"; import { assertEquals } from "@std/assert/assert_equals"; import { renderTimeDelta } from "./time.ts"; Deno.test("renderTimeDelta returns time in milliseconds if below 1 second", () => { const result1 = renderTimeDelta(1); assertEquals(result1, yellow("1 milli")); const result2 = renderTimeDelta(999); assertEquals(result2, yellow("999 millis")); }); Deno.test("renderTimeDelta returns time only in seconds if above 1 second and below 1 minute", () => { const result1 = renderTimeDelta(1001); assertEquals(result1, yellow("1 second")); const result2 = renderTimeDelta(59000); assertEquals(result2, yellow("59 seconds")); }); Deno.test("renderTimeDelta returns time in minutes and seconds if above 1 minute and below 1 hour", () => { const result1 = renderTimeDelta(60000); assertEquals(result1, `${yellow("1 minute")}`); const result2 = renderTimeDelta(1 * 60 * 60 * 1000 - 1); assertEquals(result2, 
`${yellow("59 minutes")}, 59 seconds`); }); Deno.test("renderTimeDelta returns time in hours, minutes and seconds if above 1 hour and below 1 day", () => { const result1 = renderTimeDelta(1 * 60 * 60 * 1000); assertEquals(result1, `${yellow("1 hour")}`); const result2 = renderTimeDelta(1 * 24 * 60 * 60 * 1000 - 1); assertEquals(result2, `${yellow("23 hours")}, 59 minutes, 59 seconds`); }); Deno.test("renderTimeDelta returns time in days, hours, minutes and seconds if above 1 day", () => { const result1 = renderTimeDelta(1 * 24 * 60 * 60 * 1000); assertEquals(result1, `${yellow("1 day")}`); const result2 = renderTimeDelta(1_000_000 * 24 * 60 * 60 * 1000 - 1, "en-US"); assertEquals( result2, `${yellow("999,999 days")}, 23 hours, 59 minutes, 59 seconds`, ); }); ================================================ FILE: src/utils/token_storage/darwin.ts ================================================ import keychain from "npm:keychain@1.5.0"; const KEYCHAIN_CREDS = { account: "Deno Deploy", service: "DeployCTL" }; export function getFromKeychain(): Promise { return new Promise((resolve, reject) => keychain.getPassword( KEYCHAIN_CREDS, (err: KeychainError, token: string) => { if (err && err.code !== "PasswordNotFound") { reject(err); } else { resolve(token); } }, ) ); } export function storeInKeyChain(token: string): Promise { return new Promise((resolve, reject) => keychain.setPassword( { ...KEYCHAIN_CREDS, password: token }, (err: KeychainError) => { if (err) { reject(err); } else { resolve(); } }, ) ); } export function removeFromKeyChain(): Promise { return new Promise((resolve, reject) => keychain.deletePassword(KEYCHAIN_CREDS, (err: KeychainError) => { if (err && err.code !== "PasswordNotFound") { reject(err); } else { resolve(); } }) ); } interface KeychainError { code: string; } ================================================ FILE: src/utils/token_storage/fs.ts ================================================ import { getConfigPaths } from "../info.ts"; export 
async function get(): Promise { const { credentialsPath } = getConfigPaths(); try { const info = await Deno.lstat(credentialsPath); if (!info.isFile || (info.mode !== null && (info.mode & 0o777) !== 0o600)) { throw new Error( "The credentials file has been tampered with and will be ignored. Please delete it.", ); } } catch (e) { if (e instanceof Deno.errors.NotFound) { return null; } else { throw e; } } try { const token = JSON.parse(await Deno.readTextFile(credentialsPath)).token; return token || null; } catch (_) { throw new Error( `The credentials file has been tampered with and will be ignored. Please delete it.`, ); } } export async function store(token: string): Promise { const { credentialsPath, configDir } = getConfigPaths(); await Deno.mkdir(configDir, { recursive: true }); await Deno.writeTextFile( credentialsPath, JSON.stringify({ token }, null, 2), { mode: 0o600 }, ); return Promise.resolve(); } export async function remove(): Promise { const { credentialsPath, configDir } = getConfigPaths(); await Deno.mkdir(configDir, { recursive: true }); await Deno.writeTextFile(credentialsPath, "{}", { mode: 0o600 }); return Promise.resolve(); } ================================================ FILE: src/utils/token_storage/memory.ts ================================================ let TOKEN: string | null; export function get(): Promise { return Promise.resolve(TOKEN); } export function store(token: string): Promise { TOKEN = token; return Promise.resolve(); } export function remove(): Promise { TOKEN = null; return Promise.resolve(); } ================================================ FILE: src/utils/token_storage.ts ================================================ import { interruptSpinner, wait } from "./spinner.ts"; interface TokenStorage { get: () => Promise; store: (token: string) => Promise; remove: () => Promise; } let defaultMode = false; let module: TokenStorage; if (Deno.build.os === "darwin") { const darwin = await import("./token_storage/darwin.ts"); 
const memory = await import("./token_storage/memory.ts"); module = { get: defaultOnError( "Failed to get token from Keychain. Will provision a new token for this execution but please make sure to fix the issue afterwards.", memory.get, darwin.getFromKeychain, ), store: defaultOnError( "Failed to store token into Keychain. Will keep it in memory for the duration of this execution but please make sure to fix the issue afterwards.", memory.store, darwin.storeInKeyChain, ), remove: defaultOnError( "Failed to remove token from Keychain", memory.remove, darwin.removeFromKeyChain, ), }; } else { const fs = await import("./token_storage/fs.ts"); const memory = await import("./token_storage/memory.ts"); module = { get: defaultOnError( "Failed to get token from credentials file. Will provision a new token for this execution but please make sure to fix the issue afterwards.", memory.get, fs.get, ), store: defaultOnError( "Failed to store token in credentials file. Will keep it in memory for the duration of this execution but please make sure to fix the issue afterwards.", memory.store, fs.store, ), remove: defaultOnError( "Failed to remove token from credentials file", memory.remove, fs.remove, ), }; } export default module; function defaultOnError< // deno-lint-ignore no-explicit-any F extends (...args: any) => Promise, >( notification: string, defaultFn: (...params: Parameters) => ReturnType, fn: (...params: Parameters) => ReturnType, ): (...params: Parameters) => ReturnType { return (...params) => { if (defaultMode) { return defaultFn(...params); } else { return fn(...params) .catch((err) => { const spinnerInterrupt = interruptSpinner(); wait("").start().warn(notification); let errStr = err.message; if (errStr.length > 90) { errStr = errStr.slice(0, 90) + "..."; } wait({ text: "", indent: 3 }).start().fail(errStr); spinnerInterrupt.resume(); defaultMode = true; return defaultFn(...params); }) as ReturnType; } }; } ================================================ FILE: 
src/version.ts
================================================
export const VERSION = "1.13.1";

// Make sure to keep this in sync with the "old" version in `ci.yml`
// Also don't forget to update README.md.
export const MINIMUM_DENO_VERSION = "1.46.0";

================================================
FILE: tests/config_file_test/config.json
================================================
{}

================================================
FILE: tests/config_file_test/config_file_test.ts
================================================
import { fromFileUrl } from "@std/path/from_file_url";
import configFile from "../../src/config_file.ts";
import { assert, assertEquals } from "@std/assert";

// Diffing against an empty config yields no changes; each changed key is
// reported as an { key, addition, removal } entry.
Deno.test("ConfigFile.diff returns array with additions and removals", async () => {
  const config = await configFile.read(
    fromFileUrl(new URL(import.meta.resolve("./config.json"))),
  );
  assert(!!config);
  let changes = config.diff({});
  assertEquals(changes, []);
  changes = config.diff({ project: "foo" });
  assertEquals(changes, [{
    key: "project",
    addition: "foo",
    removal: undefined,
  }]);
  // Using file URLs to avoid dealing with path normalization
  config.override({ project: "foo", entrypoint: "file://main.ts" });
  changes = config.diff({ project: "bar", entrypoint: "file://src/main.ts" });
  assertEquals(changes, [
    { key: "project", removal: "foo", addition: "bar" },
    {
      key: "entrypoint",
      removal: "file://main.ts",
      addition: "file://src/main.ts",
    },
  ]);
});

// Array-valued keys (include/exclude) are diffed as whole arrays: any entry
// change reports the full old and new arrays.
// NOTE(review): "inculde" in the test name is a typo but is a runtime string,
// so it is left untouched here.
Deno.test("ConfigFile.diff reports inculde and exclude changes when one of the entries changed", async () => {
  const config = await configFile.read(
    fromFileUrl(new URL(import.meta.resolve("./config.json"))),
  );
  assert(!!config);
  config.override({ include: ["foo", "bar"], exclude: ["fuzz", "bazz"] });
  const changes = config.diff({
    include: ["fuzz", "bazz"],
    exclude: ["foo", "bar"],
  });
  assertEquals(changes, [
    { key: "exclude", addition: ["foo", "bar"], removal: ["fuzz", "bazz"] },
    { key: "include", removal: ["foo", "bar"], addition:
["fuzz", "bazz"] }, ]); });

// An empty array passed as args counts as "not set", so the config-file
// default (["**"]) is applied in its place.
Deno.test("ConfigFile.useAsDefaultFor can handle empty array defaults", async () => {
  const config = await configFile.read(
    fromFileUrl(new URL(import.meta.resolve("./config_with_include.json"))),
  );
  assert(!!config);
  assertEquals(config.args().include?.[0], "**");
  const args = {
    include: [],
  };
  config.useAsDefaultFor(args);
  assertEquals(args.include[0], "**");
});

================================================
FILE: tests/config_file_test/config_with_include.json
================================================
{ "deploy": { "include": ["**"] } }

================================================
FILE: tests/env_vars_test/.another-env
================================================
BAR=bar

================================================
FILE: tests/env_vars_test/.overlapping-env
================================================
FOO=last

================================================
FILE: tests/env_vars_test/env_vars_test.ts
================================================
import { parseArgs } from "../../src/args.ts";
import { envVarsFromArgs } from "../../src/utils/env_vars.ts";
import { assert, assertEquals } from "@std/assert";

// Multiple --env flags accumulate into one env-var map.
Deno.test("envVarsFromArgs gets env variables from multiple --env options", async () => {
  const args = parseArgs(["--env=FOO=foo", "--env=BAR=bar"]);
  const envVars = await envVarsFromArgs(args);
  assert(envVars !== null);
  assertEquals(Object.entries(envVars).length, 2);
  assertEquals(envVars.FOO, "foo");
  assertEquals(envVars.BAR, "bar");
});

// When the same variable is given twice, the last --env wins.
Deno.test("envVarsFromArgs last --env option takes precedence when overlapping", async () => {
  const args = parseArgs(["--env=FOO=foo", "--env=BAR=bar", "--env=FOO=last"]);
  const envVars = await envVarsFromArgs(args);
  assertEquals(envVars?.FOO, "last");
});

// Variables from several --env-file flags are merged together.
Deno.test("envVarsFromArgs gets env variables from multiple --env-file options", async () => {
  const args = parseArgs([
    `--env-file=${import.meta.dirname}/.env`,
    `--env-file=${import.meta.dirname}/.another-env`,
  ]);
const envVars = await envVarsFromArgs(args);
  assert(envVars !== null);
  assertEquals(Object.entries(envVars).length, 2);
  assertEquals(envVars.FOO, "foo");
  assertEquals(envVars.BAR, "bar");
});

// When the same variable appears in several --env-file files, the last file wins.
Deno.test("envVarsFromArgs last --env-file option takes precedence when overlapping", async () => {
  const args = parseArgs([
    `--env-file=${import.meta.dirname}/.env`,
    `--env-file=${import.meta.dirname}/.another-env`,
    `--env-file=${import.meta.dirname}/.overlapping-env`,
  ]);
  const envVars = await envVarsFromArgs(args);
  assertEquals(envVars?.FOO, "last");
});

// Inline --env values beat file-provided values regardless of flag order.
Deno.test("envVarsFromArgs --env always takes precedence over --env-file", async () => {
  const args = parseArgs([
    "--env=FOO=winner",
    `--env-file=${import.meta.dirname}/.env`,
    `--env-file=${import.meta.dirname}/.another-env`,
    "--env=BAR=winner",
  ]);
  const envVars = await envVarsFromArgs(args);
  assertEquals(envVars?.FOO, "winner");
  assertEquals(envVars?.BAR, "winner");
});

================================================
FILE: tests/help_test.ts
================================================
import { assert, assertEquals, assertStringIncludes } from "@std/assert";
import { output, test } from "./utils.ts";

// No arguments: usage goes to stderr and the process exits with code 1.
test({ args: [] }, async (proc) => {
  const [stdout, stderr, { code }] = await output(proc);
  assertStringIncludes(stderr, "SUBCOMMANDS:");
  assertStringIncludes(stderr, "deploy ");
  assertStringIncludes(stderr, "upgrade ");
  assertEquals(code, 1);
  assertEquals(stdout, "");
});

// -V / --version print "deployctl <version>" to stdout and exit 0.
test({ args: ["-V"] }, async (proc) => {
  const [stdout, stderr, { code }] = await output(proc);
  assertEquals(stderr, "");
  assertEquals(code, 0);
  assert(stdout.startsWith("deployctl "));
});

test({ args: ["--version"] }, async (proc) => {
  const [stdout, stderr, { code }] = await output(proc);
  assertEquals(stderr, "");
  assertEquals(code, 0);
  assert(stdout.startsWith("deployctl "));
});

// Explicit -h: the same usage text goes to stdout and the exit code is 0.
test({ args: ["-h"] }, async (proc) => {
  const [stdout, stderr, { code }] = await output(proc);
  assertStringIncludes(stdout, "SUBCOMMANDS:");
  assertStringIncludes(stdout,
"deploy "); assertStringIncludes(stdout, "upgrade "); assertEquals(code, 0); assertEquals(stderr, ""); }); test({ args: ["deploy", "-h"] }, async (proc) => { const [stdout, stderr, { code }] = await output(proc); assertStringIncludes(stdout, "USAGE:"); assertStringIncludes(stdout, "deployctl deploy"); assertEquals(code, 0); assertEquals(stderr, ""); }); test({ args: ["upgrade", "-h"] }, async (proc) => { const [stdout, stderr, { code }] = await output(proc); assertStringIncludes(stdout, "deployctl upgrade"); assertStringIncludes(stdout, "USAGE:"); assertStringIncludes(stdout, "ARGS:"); assertEquals(code, 0); assertEquals(stderr, ""); }); ================================================ FILE: tests/utils.ts ================================================ import { lessThan as semverLessThan, parse as semverParse } from "@std/semver"; import { assert } from "@std/assert/assert"; import { MINIMUM_DENO_VERSION } from "../src/version.ts"; export interface Permissions { net: boolean; read: boolean; write: boolean; env: boolean; run: boolean; sys: boolean; } export function deployctl( args: string[], permissions: Permissions = { net: true, read: true, write: true, env: true, run: true, sys: true, }, ): Deno.ChildProcess { const deno = [ Deno.execPath(), "run", ]; if (permissions?.net) deno.push("--allow-net"); if (permissions?.read) deno.push("--allow-read"); if (permissions?.write) deno.push("--allow-write"); if (permissions?.env) deno.push("--allow-env"); if (permissions?.run) deno.push("--allow-run"); if (permissions?.sys) deno.push("--allow-sys"); deno.push("--quiet"); // Deno 1.x does not support lockfile v4. To work around this, we append // `--no-lock` in this case. const v2 = semverParse("2.0.0"); assert( semverLessThan(semverParse(MINIMUM_DENO_VERSION), v2), "We do not support Deno 1.x anymore. 
Please remove the `isDeno1` check below in the source code.", ); const isDeno1 = semverLessThan(semverParse(Deno.version.deno), v2); if (isDeno1) { deno.push("--no-lock"); } deno.push(new URL("../deployctl.ts", import.meta.url).toString()); const cmd = Deno.build.os == "linux" ? ["bash", "-c", [...deno, ...args].join(" ")] : [...deno, ...args]; return new Deno.Command(cmd[0], { args: cmd.slice(1), stdin: "null", stdout: "piped", stderr: "piped", }).spawn(); } export interface TestOptions { args: string[]; name?: string; permissions?: Permissions; } export function test( opts: TestOptions, fn: (proc: Deno.ChildProcess) => void | Promise, ) { const name = opts.name ?? ["deployctl", ...opts.args].join(" "); Deno.test(name, async () => { const proc = deployctl(opts.args, opts.permissions); await fn(proc); }); } export async function output( proc: Deno.ChildProcess, ): Promise<[string, string, Deno.CommandStatus]> { const [status, { stdout, stderr }] = await Promise.all([ proc.status, proc.output(), ]); return [ new TextDecoder().decode(stdout), new TextDecoder().decode(stderr), status, ]; } ================================================ FILE: tools/bundle.ts ================================================ // Copyright 2024 Deno Land Inc. All rights reserved. MIT license. 
import { bundle, type ImportMap } from "@deno/emit"; import { resolve } from "@std/path/resolve"; import { parse as parseJsonc } from "@std/jsonc"; const entrypoint = Deno.args[0]; const resolvedPath = resolve(Deno.cwd(), entrypoint); const configPath = resolve(Deno.cwd(), "deno.jsonc"); const config = await Deno.readTextFile(configPath); const result = await bundle(resolvedPath, { importMap: parseJsonc(config) as ImportMap, }); console.log(`// deno-fmt-ignore-file // deno-lint-ignore-file // This code was bundled using \`deno task build-action\` and it's not recommended to edit it manually `); console.log(result.code); ================================================ FILE: tools/version_match.ts ================================================ // Copyright 2023 Deno Land Inc. All rights reserved. MIT license. // This script ensures that version specifier defined in `src/version.ts` // matches the released tag version. // Intended to run when a draft release is created on GitHub. import { VERSION } from "../src/version.ts"; import { assertEquals } from "@std/assert/assert_equals"; const releaseTagVersion = Deno.env.get("RELEASE_TAG")!; assertEquals(VERSION, releaseTagVersion);