[
  {
    "path": ".codesandbox/tasks.json",
    "content": "{\n  // These tasks will run in order when initializing your CodeSandbox project.\n  \"setupTasks\": [\n    {\n      \"name\": \"Install Dependencies\",\n      \"command\": \"yarn install\"\n    }\n  ],\n\n  // These tasks can be run from CodeSandbox. Running one will open a log in the app.\n  \"tasks\": {\n    \"build\": {\n      \"name\": \"build\",\n      \"command\": \"yarn build\",\n      \"runAtStart\": false\n    },\n    \"build:git-extractor\": {\n      \"name\": \"build:git-extractor\",\n      \"command\": \"yarn build:git-extractor\",\n      \"runAtStart\": false\n    },\n    \"build:dependencies\": {\n      \"name\": \"build:dependencies\",\n      \"command\": \"yarn build:dependencies\",\n      \"runAtStart\": false\n    },\n    \"test\": {\n      \"name\": \"test\",\n      \"command\": \"yarn test\",\n      \"runAtStart\": false\n    },\n    \"start\": {\n      \"name\": \"start\",\n      \"command\": \"yarn start\",\n      \"runAtStart\": false\n    },\n    \"dev\": {\n      \"name\": \"dev\",\n      \"command\": \"yarn dev\",\n      \"runAtStart\": true,\n      \"preview\": {\n        \"port\": 2000\n      }\n    }\n  }\n}\n"
  },
  {
    "path": ".dockerignore",
    "content": ".git\n.gitignore\nREADME.md\ndocker-compose.yml\nnode_modules\nDockerfile\ndist\n# Ignore generated credentials from google-github-actions/auth\ngha-creds-*.json\n"
  },
  {
    "path": ".eslintrc.js",
    "content": "module.exports = {\n  extends: 'airbnb',\n  plugins: ['react', 'jest'],\n};\n"
  },
  {
    "path": ".github/workflows/build-image.yml",
    "content": "name: ci\n\non:\n  push:\n    branches:\n      - \"master\"\n\njobs:\n  docker:\n    runs-on: ubuntu-latest\n\n    permissions:\n      contents: \"read\"\n      id-token: \"write\"\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v3\n\n      - name: Set up Docker Buildx\n        uses: docker/setup-buildx-action@v3\n        with:\n          driver: docker\n\n      - name: Docker meta\n        id: meta\n        uses: docker/metadata-action@v5\n        with:\n          images: codesandbox/importers\n          tags: |\n            type=sha,prefix=\n\n      - uses: docker/login-action@v3\n        with:\n          username: ${{ secrets.DOCKERHUB_USERNAME }}\n          password: ${{ secrets.DOCKERHUB_TOKEN }}\n\n      - name: Build and push\n        uses: docker/build-push-action@v5\n        with:\n          context: .\n          push: ${{ github.event_name != 'pull_request' }}\n          tags: ${{ steps.meta.outputs.tags }}\n          labels: ${{ steps.meta.outputs.labels }}\n"
  },
  {
    "path": ".gitignore",
    "content": ".DS_Store\n\nnode_modules\nnpm-debug.log\nyarn-error.log\nbuild\npublic\ntemp\njest\n.aws\ndist\nlerna-debug.log\n"
  },
  {
    "path": ".prettierrc",
    "content": "{}\n"
  },
  {
    "path": "Dockerfile",
    "content": "FROM node:19-alpine as build\nWORKDIR /app\nCOPY . .\n\nRUN yarn\n\nRUN yarn build\n\nUSER node\n\nCMD [\"node\", \"./packages/git-extractor/dist/index.js\"]\n"
  },
  {
    "path": "LICENSE",
    "content": "Copyright (c) Ives van Hoorne\n\nUp is an Open Source project licensed under the terms of\nthe GPLv3 license. Please see <http://www.gnu.org/licenses/gpl-3.0.html>\nfor license text.\n"
  },
  {
    "path": "catalog-info.yaml",
    "content": "apiVersion: backstage.io/v1alpha1\nkind: Component\nmetadata:\n  name: github-importer\n  description: All importers & exporters for CodeSandbox \n  annotations:\n    github.com/project-slug: codesandbox/codesandbox-importers\n    backstage.io/kubernetes-namespace: default\n    backstage.io/kubernetes-label-selector: app.kubernetes.io/name=codesandbox,component=github-importers\n    codesandbox/deploy-image: codesandbox/importers\n    codesandbox/deploy-gitops-repo: codesandbox/codesandbox-gitops\n    codesandbox/deploy-gitops-yaml-key: \"githubImporters.image.tag\"\n    codesandbox/deploy-image-tag-regex-production: \".*\"\n    codesandbox/deploy-gitops-file-production: codesandbox-core/codesandbox/production/helm-chart-values/values.yaml\n    codesandbox/deploy-image-tag-regex-staging: \".*\"\n    codesandbox/deploy-gitops-file-staging: codesandbox-core/codesandbox/staging/helm-chart-values/values.yaml\n    codesandbox/deploy-gitops-version-type: tag\nspec:\n  type: service\n  lifecycle: production\n  owner: infra\n"
  },
  {
    "path": "lerna.json",
    "content": "{\n  \"lerna\": \"2.4.0\",\n  \"packages\": [\"packages/*\"],\n  \"version\": \"2.2.3\",\n  \"npmClient\": \"yarn\"\n}\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"codesandbox-importers\",\n  \"version\": \"1.0.0\",\n  \"description\": \"\",\n  \"private\": true,\n  \"scripts\": {\n    \"build\": \"yarn build:dependencies && lerna run build --scope codesandbox --scope git-converter --parallel\",\n    \"build:git-extractor\": \"yarn build:dependencies && lerna run build --scope git-converter\",\n    \"build:dependencies\": \"lerna run build --scope codesandbox-import-utils --scope codesandbox-import-util-types --stream\",\n    \"test\": \"lerna run test\",\n    \"start\": \"lerna run start --stream\",\n    \"dev\": \"lerna run dev --stream\"\n  },\n  \"author\": \"\",\n  \"devDependencies\": {\n    \"jest\": \"^29.6.2\",\n    \"lerna\": \"^7.1.5\",\n    \"prettier\": \"^2.2.1\",\n    \"rimraf\": \"^2.6.2\",\n    \"ts-jest\": \"^29.1.1\",\n    \"typescript\": \"^4.3.0\"\n  },\n  \"workspaces\": [\n    \"packages/cli\",\n    \"packages/git-extractor\",\n    \"packages/import-utils\",\n    \"packages/types\"\n  ]\n}\n"
  },
  {
    "path": "packages/cli/.gitignore",
    "content": "lib\n"
  },
  {
    "path": "packages/cli/LICENSE",
    "content": "Copyright (c) Ives van Hoorne\n\nUp is an Open Source project licensed under the terms of\nthe GPLv3 license. Please see <http://www.gnu.org/licenses/gpl-3.0.html>\nfor license text.\n"
  },
  {
    "path": "packages/cli/README.md",
    "content": "# codesandbox-cli\n\n> Upload your templates to codesandbox with a single command 🏖️\n\n[![Build Status](https://travis-ci.org/codesandbox/codesandbox-cli.svg?branch=master)](https://travis-ci.org/codesandbox/codesandbox-cli)\n\nThis is the command line interface for [CodeSandbox](https://codesandbox.io), an online editor\ntailored for web applications.\n\n## Quickstart\n\nYou can install the cli by running\n\n```bash\n# Install the cli\nnpm i -g codesandbox\n\n# Go to your project\ncd <path of your project>\n\n# Deploy your project to CodeSandbox\ncodesandbox ./\n```\n\n## Future features\n\n- Create a live connection with CodeSandbox using websockets so you can use your local editor\n\n## Current limitations\n\n- You need to be signed in to deploy, this is to prevent abuse\n\n## Inspiration\n\nI took a lot of inspiration from [now-cli](https://github.com/zeit/now-cli) and [preact-cli](https://github.com/developit/preact-cli) while building this.\n"
  },
  {
    "path": "packages/cli/package.json",
    "content": "{\n  \"name\": \"codesandbox\",\n  \"version\": \"2.2.3\",\n  \"description\": \"The CLI used for communicating with CodeSandbox\",\n  \"main\": \"lib/index.js\",\n  \"bin\": {\n    \"codesandbox\": \"./lib/index.js\"\n  },\n  \"author\": \"Ives van Hoorne\",\n  \"license\": \"MIT\",\n  \"scripts\": {\n    \"test\": \"echo Done && exit 0\",\n    \"test:watch\": \"jest --watch\",\n    \"build\": \"rimraf lib && tsc -p tsconfig.json\",\n    \"watch\": \"tsc --watch -p tsconfig.json\",\n    \"prepublish\": \"yarn build\"\n  },\n  \"repository\": \"codesandbox-app/codesandbox-importers\",\n  \"files\": [\n    \"lib\"\n  ],\n  \"keywords\": [\n    \"codesandbox\",\n    \"cli\",\n    \"editor\"\n  ],\n  \"dependencies\": {\n    \"axios\": \"^1.6.0\",\n    \"chalk\": \"^2.4.1\",\n    \"codesandbox-import-util-types\": \"^2.2.3\",\n    \"codesandbox-import-utils\": \"^2.2.3\",\n    \"commander\": \"^2.9.0\",\n    \"datauri\": \"^3.0.0\",\n    \"filesize\": \"^3.6.1\",\n    \"fs-extra\": \"^3.0.1\",\n    \"git-branch\": \"^1.0.0\",\n    \"git-repo-name\": \"^0.6.0\",\n    \"git-username\": \"^0.5.0\",\n    \"humps\": \"^2.0.1\",\n    \"inquirer\": \"^8.2.4\",\n    \"lodash\": \"^4.17.5\",\n    \"lz-string\": \"^1.4.4\",\n    \"ms\": \"^2.0.0\",\n    \"open\": \"^6.3.0\",\n    \"ora\": \"^1.3.0\",\n    \"shortid\": \"^2.2.8\",\n    \"update-notifier\": \"^2.2.0\"\n  },\n  \"devDependencies\": {\n    \"@types/commander\": \"^2.9.1\",\n    \"@types/filesize\": \"^3.6.0\",\n    \"@types/fs-extra\": \"^3.0.3\",\n    \"@types/humps\": \"^1.1.2\",\n    \"@types/inquirer\": \"^0.0.35\",\n    \"@types/jest\": \"^20.0.2\",\n    \"@types/lodash\": \"^4.14.106\",\n    \"@types/lz-string\": \"^1.3.32\",\n    \"@types/ms\": \"^0.7.29\",\n    \"@types/node\": \"^14\",\n    \"@types/ora\": \"^0.3.31\",\n    \"@types/shortid\": \"^0.0.29\",\n    \"@types/update-notifier\": \"^1.0.1\",\n    \"filesize\": \"^3.6.1\",\n    \"rimraf\": \"^2.6.1\",\n    \"tslint\": \"^5.4.3\",\n  
  \"tslint-config-prettier\": \"^1.10.0\"\n  },\n  \"jest\": {\n    \"transform\": {\n      \".(ts|tsx)\": \"<rootDir>../../node_modules/ts-jest/preprocessor.js\"\n    },\n    \"testEnvironment\": \"node\",\n    \"moduleFileExtensions\": [\n      \"ts\",\n      \"tsx\",\n      \"js\",\n      \"json\"\n    ],\n    \"testPathIgnorePatterns\": [\n      \"<rootDir>/node_modules/\",\n      \"<rootDir>/dist/\"\n    ],\n    \"testRegex\": \"(/__tests__/.*|\\\\.(test|spec))\\\\.(ts|tsx|js)$\"\n  },\n  \"gitHead\": \"3cdcdea389d39f2a92be73dcb73496f68c8ada41\"\n}\n"
  },
  {
    "path": "packages/cli/src/api/define.ts",
    "content": "import { getParameters } from \"codesandbox-import-utils/lib/api/define\";\n\nexport { getParameters };\n"
  },
  {
    "path": "packages/cli/src/cfg.ts",
    "content": "import { homedir } from \"os\";\n\nimport * as fs from \"fs-extra\";\nimport * as path from \"path\";\n\nimport * as api from \"./utils/api\";\nimport { error } from \"./utils/log\";\nimport { IS_STAGING } from \"./utils/env\";\n\n// tslint:disable no-var-requires\nconst ms = require(\"ms\");\nconst TTL = ms(\"8h\");\n\nexport interface IUser {\n  avatar_url: string;\n  email: string;\n  id: string;\n  name: string;\n  username: string;\n  jwt: string;\n}\n\nexport interface IConfig {\n  [key: string]: any | undefined;\n  lastUpdate?: number;\n  user?: IUser;\n}\n\nconst CONFIG_NAME = IS_STAGING\n  ? \".codesandbox-staging.json\"\n  : \".codesandbox.json\";\n\nconst file = process.env.CODESANDBOX_JSON\n  ? path.resolve(process.env.CODESANDBOX_JSON)\n  : path.resolve(homedir(), CONFIG_NAME);\n\n/**\n * Save config file\n *\n * @param {Object} data data to save\n */\nasync function save(data: object) {\n  await fs.writeFile(file, JSON.stringify(data, null, 2));\n}\n\n/**\n * Load and parse config file\n */\nexport async function read(): Promise<IConfig> {\n  let existing: IConfig = {};\n  try {\n    const fileData = await (fs.readFile(file, \"utf8\") as Promise<string>);\n    existing = JSON.parse(fileData);\n  } catch (err) {\n    /* Do nothing */\n  }\n\n  if (!existing.token) {\n    return {};\n  }\n\n  if (!existing.lastUpdate || Date.now() - existing.lastUpdate > TTL) {\n    const token = existing.token;\n    try {\n      const user = await api.fetchUser(token);\n\n      if (user) {\n        existing = { ...existing, user, lastUpdate: Date.now() };\n\n        await save(existing);\n      } else {\n        await deleteUser();\n      }\n    } catch (e) {\n      error(\"Could not authorize the user.\");\n      await deleteUser();\n    }\n  }\n\n  return existing;\n}\n\n// Removes a key from the config and store the result\nexport async function remove(key: string) {\n  const cfg = await read();\n  if (key in cfg) {\n    delete cfg[key];\n  }\n  
await fs.writeFile(file, JSON.stringify(cfg, null, 2));\n}\n\n/**\n * Merge the given data in the current config\n * @param data\n */\nexport async function merge(data: object) {\n  const oldConfig = await read();\n  const cfg = { ...oldConfig, ...data };\n  await save(cfg);\n\n  return cfg;\n}\n\n/**\n * Delete given user from config\n *\n * @export\n */\nexport async function deleteUser() {\n  await save({});\n}\n\n/**\n * Save specific user in state\n *\n * @export\n * @param {User} user\n * @returns\n */\nexport function saveUser(token: string, user: IUser) {\n  return merge({ user, token, lastUpdate: Date.now() });\n}\n\n/**\n * Gets user from config\n *\n * @export\n * @returns\n */\nexport async function getUser(): Promise<IUser | undefined> {\n  const cfg = await read();\n  return cfg.user;\n}\n\nexport async function getToken(): Promise<string | undefined> {\n  const cfg = await read();\n  return cfg.token;\n}\n\nexport const removeFile = async () => fs.remove(file);\n"
  },
  {
    "path": "packages/cli/src/commands/deploy.ts",
    "content": "import chalk from \"chalk\";\nimport * as Commander from \"commander\";\nimport * as inquirer from \"inquirer\";\nimport * as filesize from \"filesize\";\nimport createSandbox from \"codesandbox-import-utils/lib/create-sandbox\";\nimport { join } from \"path\";\n\nimport { getUser } from \"../cfg\";\nimport { uploadSandbox } from \"../utils/api\";\nimport confirm from \"../utils/confirm\";\nimport { error, info, log, success } from \"../utils/log\";\nimport { createSandboxUrl } from \"../utils/url\";\nimport { login } from \"./login\";\n\nimport parseSandbox, { IUploads } from \"../utils/parse-sandbox\";\nimport FileError from \"../utils/parse-sandbox/file-error\";\nimport uploadFiles from \"../utils/parse-sandbox/upload-files\";\n\n// tslint:disable no-var-requires\nconst ora = require(\"ora\");\nconst MAX_MODULE_COUNT = 500;\nconst MAX_DIRECTORY_COUNT = 500;\n\n/**\n * Show warnings for the errors that occured during mapping of files, we\n * still give the user to continue deployment without those files.\n *\n * @param {string} resolvedPath\n * @param {FileError[]} errors\n */\nasync function showWarnings(resolvedPath: string, errors: FileError[]) {\n  if (errors.length > 0) {\n    console.log();\n    log(\n      chalk.yellow(\n        `There are ${chalk.bold(\n          errors.length.toString()\n        )} files that cannot be deployed:`\n      )\n    );\n    for (const err of errors) {\n      const relativePath = err.path.replace(resolvedPath, \"\");\n\n      log(`${chalk.yellow.bold(relativePath)}: ${err.message}`);\n    }\n    console.log();\n  }\n}\n\nasync function showUploads(resolvedPath: string, uploads: IUploads) {\n  if (Object.keys(uploads).length > 0) {\n    console.log();\n    log(\n      chalk.blue(\n        `We will upload ${\n          Object.keys(uploads).length\n        } static files to your CodeSandbox upload storage:`\n      )\n    );\n    Object.keys(uploads).forEach((path) => {\n      const relativePath = 
path.replace(resolvedPath, \"\");\n      log(\n        `${chalk.yellow.bold(relativePath)}: ${filesize(\n          uploads[path].byteLength\n        )}`\n      );\n    });\n    console.log();\n  }\n}\n\nexport default function registerCommand(program: typeof Commander) {\n  program\n    .command(\"deploy <path>\")\n    .alias(\"*\")\n    .description(\n      `deploy an application to CodeSandbox ${chalk.bold(\"(default)\")}`\n    )\n    .action(async (path) => {\n      const user = await getUser();\n\n      if (!user) {\n        info(\"You need to sign in before you can deploy applications\");\n        const confirmed = await confirm(\"Do you want to sign in using GitHub?\");\n\n        if (!confirmed) {\n          return;\n        }\n\n        await login();\n      }\n\n      info(`Deploying ${path} to CodeSandbox`);\n      try {\n        let resolvedPath = join(\"./\", path);\n\n        if (resolvedPath.endsWith(\"/\")) {\n          resolvedPath = resolvedPath.slice(0, -1);\n        }\n\n        const fileData = await parseSandbox(resolvedPath);\n\n        // Show files that will be uploaded\n        await showUploads(resolvedPath, fileData.uploads);\n\n        // Show warnings for all errors\n        await showWarnings(resolvedPath, fileData.errors);\n\n        info(\n          \"By deploying to CodeSandbox, the code of your project will be made \" +\n            chalk.bold(\"public\")\n        );\n\n        const acceptPublic = await confirm(\n          \"Are you sure you want to proceed with the deployment?\",\n          true\n        );\n        if (!acceptPublic) {\n          return;\n        }\n\n        let finalFiles = fileData.files;\n        const spinner = ora(\"\").start();\n        if (Object.keys(fileData.uploads).length) {\n          spinner.text = \"Uploading files to CodeSandbox\";\n\n          const uploadedFiles = await uploadFiles(fileData.uploads);\n\n          finalFiles = { ...finalFiles, ...uploadedFiles };\n        }\n\n        const 
sandbox = await createSandbox(finalFiles);\n\n        if (sandbox.modules.length > MAX_MODULE_COUNT) {\n          throw new Error(\n            `This project is too big, it contains ${sandbox.modules.length} files which is more than the max of ${MAX_MODULE_COUNT}.`\n          );\n        }\n\n        if (sandbox.directories.length > MAX_DIRECTORY_COUNT) {\n          throw new Error(\n            `This project is too big, it contains ${sandbox.directories.length} directories which is more than the max of ${MAX_DIRECTORY_COUNT}.`\n          );\n        }\n\n        spinner.text = \"Deploying to CodeSandbox\";\n\n        try {\n          const sandboxData = await uploadSandbox(sandbox);\n\n          spinner.stop();\n\n          success(\n            \"Succesfully created the sandbox, you can find the sandbox here:\"\n          );\n          success(createSandboxUrl(sandboxData));\n        } catch (e) {\n          spinner.stop();\n\n          error(\"Something went wrong while uploading to the API\");\n          error(e.message);\n        }\n      } catch (e) {\n        error(e.message);\n      }\n    });\n}\n"
  },
  {
    "path": "packages/cli/src/commands/login.ts",
    "content": "import * as http from \"http\";\nimport * as inquirer from \"inquirer\";\nimport { omit } from \"lodash\";\nimport * as open from \"open\";\nimport ora = require(\"ora\");\n\nimport * as cfg from \"../cfg\";\n\nimport * as api from \"../utils/api\";\nimport confirm from \"../utils/confirm\";\nimport { error, info } from \"../utils/log\";\nimport { LOGIN_URL as CLI_LOGIN_URL } from \"../utils/url\";\n\n// TYPES\nimport * as Commander from \"commander\";\n\n/**\n * Start the sign in process by opening CodeSandbox CLI login url, this page\n * will show a token that the user will have to fill in in the CLI\n *\n * @returns\n */\nasync function handleSignIn() {\n  // Open specific url\n  info(`Opening ${CLI_LOGIN_URL}`);\n  open(CLI_LOGIN_URL, { wait: false });\n\n  const { authToken } = await inquirer.prompt([\n    {\n      message: \"Token:\",\n      name: \"authToken\",\n      type: \"input\",\n    },\n  ]);\n\n  // We got the token! Ask the server on authorization\n  const spinner = ora(\"Fetching user...\").start();\n  try {\n    const { token, user } = await api.verifyUser(authToken);\n\n    // Save definite token and user to config\n    spinner.text = \"Saving user...\";\n    await cfg.saveUser(token, user);\n    spinner.stop();\n\n    return user;\n  } catch (e) {\n    spinner.stop();\n    throw e;\n  }\n}\n\nexport async function login() {\n  info(\"We will open CodeSandbox and show an authorization token.\");\n  info(\"You'll need enter this token in the CLI to sign in.\");\n\n  const confirmed = await confirm(\n    \"We will open CodeSandbox to finish the login process.\"\n  );\n\n  console.log();\n\n  if (confirmed) {\n    try {\n      const user = await handleSignIn();\n\n      info(`Succesfully signed in as ${user.username}!`);\n    } catch (e) {\n      error(\"Something went wrong while signing in: \" + e.message);\n    }\n  }\n}\n\nexport default function registerCLI(program: typeof Commander) {\n  program\n    .command(\"login\")\n    
.description(\"sign in to your CodeSandbox account or create a new one\")\n    .option(\"-s\", \"don't ask for sign in if you're already signed in\")\n    .action(async (cmd) => {\n      const user = await cfg.getUser();\n      const silent = !!cmd.S;\n\n      if (user) {\n        if (silent) {\n          return;\n        }\n\n        const confirmed = await confirm(\n          \"You are already logged in, would you like to sign out first?\"\n        );\n\n        if (confirmed) {\n          await cfg.deleteUser();\n        } else {\n          return;\n        }\n      }\n\n      await login();\n    });\n}\n"
  },
  {
    "path": "packages/cli/src/commands/logout.ts",
    "content": "import * as Commander from \"commander\";\n\nimport { deleteUser, getUser } from \"../cfg\";\nimport confirm from \"../utils/confirm\";\nimport { error, info } from \"../utils/log\";\n\nexport default function registerCLI(program: typeof Commander) {\n  program\n    .command(\"logout\")\n    .description(\"sign out from CodeSandbox\")\n    .action(async () => {\n      const user = await getUser();\n      if (user) {\n        const confirmed = await confirm(\"Are you sure you want to log out?\");\n\n        if (confirmed) {\n          await deleteUser();\n          info(\"Succesfully logged out\");\n        }\n      } else {\n        error(\"You are already signed out\");\n      }\n    });\n}\n"
  },
  {
    "path": "packages/cli/src/commands/token.ts",
    "content": "import * as cfg from \"../cfg\";\n\n// TYPES\nimport * as Commander from \"commander\";\n\nexport default function registerToken(program: typeof Commander) {\n  program\n    .command(\"token\")\n    .description(\"get your login token to CodeSandbox\")\n    .action(async () => {\n      const token = await cfg.getToken();\n\n      if (token === undefined) {\n        process.exit(1);\n      }\n\n      console.log(token);\n    });\n}\n"
  },
  {
    "path": "packages/cli/src/github/url.ts",
    "content": "import * as fs from \"fs\";\nimport { basename, dirname } from \"path\";\n\n/* tslint:disable no-var-requires */\nconst branch = require(\"git-branch\");\nconst username = require(\"git-username\");\nconst repoName = require(\"git-repo-name\");\n/* tslint:enable */\n\nexport interface IOptions {\n  examplePath?: string;\n  openedModule?: string;\n  fontSize?: number;\n  highlightedLines?: number[];\n  editorSize?: number;\n  view?: \"editor\" | \"preview\";\n  hideNavigation?: boolean;\n  currentModuleView?: boolean;\n  autoResize?: boolean;\n  useCodeMirror?: boolean;\n  enableEslint?: boolean;\n  forceRefresh?: boolean;\n  expandDevTools?: boolean;\n  initialPath?: string;\n  gitRepo?: string;\n  gitUsername?: string;\n  gitBranch?: string;\n}\n\nfunction optionsToParameterizedUrl(options: { [option: string]: any }) {\n  const keyValues = Object.keys(options)\n    .sort()\n    .filter((x) => options[x])\n    .map(\n      (key) => `${encodeURIComponent(key)}=${encodeURIComponent(options[key])}`\n    )\n    .join(\"&\");\n\n  return keyValues ? 
`?${keyValues}` : \"\";\n}\n\nfunction getUrlOptions(options: IOptions) {\n  const {\n    view,\n    autoResize,\n    hideNavigation,\n    currentModuleView,\n    fontSize,\n    initialPath,\n    enableEslint,\n    useCodeMirror,\n    expandDevTools,\n    forceRefresh,\n    openedModule,\n  } = options;\n\n  const results: { [option: string]: any } = {};\n\n  results.module = openedModule;\n  results.view = view;\n  results.initialpath = initialPath;\n\n  if (autoResize) {\n    results.autoresize = 1;\n  }\n\n  if (hideNavigation) {\n    results.hidenavigation = 1;\n  }\n\n  if (currentModuleView) {\n    results.moduleview = 1;\n  }\n\n  if (enableEslint) {\n    results.eslint = 1;\n  }\n\n  if (expandDevTools) {\n    results.expanddevtools = 1;\n  }\n\n  if (useCodeMirror) {\n    results.codemirror = 1;\n  }\n\n  if (forceRefresh) {\n    results.forcerefresh = 1;\n  }\n\n  if (fontSize !== 14) {\n    results.fontsize = fontSize;\n  }\n\n  if (initialPath) {\n    results.initialpath = initialPath;\n  }\n\n  if (expandDevTools) {\n    results.expanddevtools = 1;\n  }\n\n  return optionsToParameterizedUrl(results);\n}\n\nconst CODESANDBOX_ROOT = `https://codesandbox.io`;\n\nfunction findGitRoot() {\n  let currentPath = __dirname;\n\n  while (\n    !fs.readdirSync(currentPath).find((f) => basename(f) === \".git\") &&\n    currentPath !== \"/\"\n  ) {\n    currentPath = dirname(currentPath);\n  }\n\n  if (currentPath === \"/\") {\n    throw new Error(\"Could not find .git folder\");\n  }\n\n  return currentPath;\n}\n\nfunction getRepoPath(options: IOptions) {\n  const gitPath = findGitRoot();\n  let currentBranch;\n  let currentUsername;\n  const currentRepo = options.gitRepo || repoName.sync(gitPath);\n\n  // Check whether the build is happening on Netlify\n  if (process.env.REPOSITORY_URL) {\n    const usernameParts = process.env.REPOSITORY_URL.match(\n      /github.com[:|\\/](.*)\\/reactjs\\.org/\n    );\n\n    if (usernameParts) {\n      currentUsername = 
usernameParts[1];\n    }\n    currentBranch = process.env.BRANCH;\n  } else {\n    currentBranch = branch.sync(gitPath);\n    currentUsername = username(gitPath);\n  }\n\n  currentBranch = currentBranch || options.gitBranch;\n  currentUsername = currentUsername || options.gitUsername;\n\n  if (!currentBranch) {\n    throw new Error(\"Could not fetch branch from the git info.\");\n  }\n  if (!currentUsername) {\n    throw new Error(\"Could not fetch username from the git info.\");\n  }\n  if (!currentRepo) {\n    throw new Error(\"Could not fetch repository from the git info.\");\n  }\n\n  let path = `${currentUsername}/${currentRepo}/tree/${currentBranch}`;\n\n  if (options.examplePath) {\n    path += \"/\" + options.examplePath;\n  }\n\n  return path;\n}\n\nfunction getFullUrl(type: \"s\" | \"embed\", options: IOptions) {\n  const gitPath = getRepoPath(options);\n  const urlOptions = getUrlOptions(options);\n\n  return `${CODESANDBOX_ROOT}/${type}/github/${gitPath}${urlOptions}`;\n}\n\nexport function getSandboxUrl(options?: IOptions) {\n  return getFullUrl(\"s\", options || {});\n}\n\nexport function getEmbedUrl(options?: IOptions) {\n  return getFullUrl(\"embed\", options || {});\n}\n"
  },
  {
    "path": "packages/cli/src/index.ts",
    "content": "#!/usr/bin/env node\nimport * as program from \"commander\";\nimport * as updateNotifier from \"update-notifier\";\n\n// Commands\nimport deployCommand from \"./commands/deploy\";\nimport loginCommand from \"./commands/login\";\nimport logoutCommand from \"./commands/logout\";\nimport tokenCommand from \"./commands/token\";\n\nimport { extraHelp, logCodeSandbox } from \"./utils/log\";\n\n// tslint:disable no-var-requires\nconst packageInfo = require(\"../package.json\");\n\nprogram.version(packageInfo.version);\n\nprogram.on(\"--help\", extraHelp);\n\n// Register commands\ndeployCommand(program);\nloginCommand(program);\ntokenCommand(program);\nlogoutCommand(program);\n\nprogram.parse(process.argv);\n\nif (!process.argv.slice(2).length) {\n  console.log();\n  logCodeSandbox();\n  console.log();\n\n  program.outputHelp();\n}\n\nupdateNotifier({ pkg: packageInfo }).notify();\n"
  },
  {
    "path": "packages/cli/src/utils/api.ts",
    "content": "import axios, { AxiosRequestConfig } from \"axios\";\nimport { ISandbox } from \"codesandbox-import-util-types\";\nimport { values } from \"lodash\";\nimport { decamelizeKeys } from \"humps\";\n\nimport { getToken } from \"../cfg\";\nimport {\n  CREATE_SANDBOX_URL,\n  GET_USER_URL,\n  verifyUserTokenUrl,\n  CREATE_UPLOAD_URL,\n} from \"./url\";\n\n// tslint:disable-next-line:no-var-requires\nconst DatauriParser = require(\"datauri/parser\");\n\nconst callApi = async (options: AxiosRequestConfig) => {\n  try {\n    const response = await axios(options);\n    return response.data.data;\n  } catch (e) {\n    if (e.response && e.response.data && e.response.data.errors) {\n      e.message = values(e.response.data.errors)[0];\n    }\n    throw e;\n  }\n};\n\nexport async function uploadSandbox(sandbox: ISandbox) {\n  const token = await getToken();\n\n  if (token == null) {\n    throw new Error(\"You're not signed in\");\n  }\n\n  const sandboxData = {\n    ...decamelizeKeys(sandbox),\n    from_cli: true,\n  };\n\n  const options: AxiosRequestConfig = {\n    data: {\n      sandbox: sandboxData,\n    },\n    headers: {\n      Authorization: `Bearer ${token}`,\n    },\n    method: \"POST\",\n    url: CREATE_SANDBOX_URL,\n  };\n\n  return callApi(options);\n}\n\nexport async function fetchUser(token: string) {\n  const Authorization = `Bearer ${token}`;\n  const options: AxiosRequestConfig = {\n    headers: {\n      Authorization,\n    },\n    method: \"GET\",\n    url: GET_USER_URL,\n  };\n\n  return callApi(options);\n}\n\nexport async function verifyUser(token: string) {\n  const options: AxiosRequestConfig = {\n    method: \"GET\",\n    url: verifyUserTokenUrl(token),\n  };\n\n  return callApi(options);\n}\n\nexport async function createUpload(filename: string, buffer: Buffer) {\n  const parser = new DatauriParser();\n\n  parser.format(filename, buffer);\n  const uri = parser.content;\n\n  const token = await getToken();\n\n  if (token == null) {\n    
throw new Error(\"You're not signed in\");\n  }\n\n  const options: AxiosRequestConfig = {\n    data: {\n      name: filename,\n      content: uri,\n    },\n    headers: {\n      Authorization: `Bearer ${token}`,\n    },\n    method: \"POST\",\n    url: CREATE_UPLOAD_URL,\n  };\n\n  return callApi(options);\n}\n"
  },
  {
    "path": "packages/cli/src/utils/confirm.ts",
    "content": "import * as inquirer from \"inquirer\";\n\nexport default async function confirm(question: string, defaultNo = false) {\n  const { confirmed } = await inquirer.prompt([\n    {\n      default: !defaultNo,\n      message: question,\n      name: \"confirmed\",\n      type: \"confirm\",\n    },\n  ]);\n  return confirmed;\n}\n"
  },
  {
    "path": "packages/cli/src/utils/env.ts",
    "content": "export const IS_STAGING = process.env.CODESANDBOX_NODE_ENV === \"development\";\n"
  },
  {
    "path": "packages/cli/src/utils/log.ts",
    "content": "import chalk from \"chalk\";\n\nexport function log(text = \"\") {\n  console.log(`> ${text}`);\n}\n\nexport function logCodeSandbox() {\n  console.log(\n    `  ${chalk.blue.bold(\"Code\")}${chalk.yellow.bold(\"Sandbox\")} ${chalk.bold(\n      \"CLI\"\n    )}`\n  );\n  console.log(\"  The official CLI for uploading projects to CodeSandbox\");\n}\n\nexport function extraHelp() {\n  console.log(\"\");\n  console.log(\"  Notes:\");\n  console.log();\n  console.log(\"    - You can only use the CLI if you are logged in\");\n  console.log();\n\n  console.log(\"  Examples:\");\n  console.log(\"\");\n  console.log(chalk.gray(\"    Deploy current directory:\"));\n  console.log();\n  console.log(\"    $ codesandbox ./\");\n  console.log();\n  console.log(chalk.gray(\"    Deploy custom directory:\"));\n  console.log();\n  console.log(\"    $ codesandbox /usr/src/project\");\n  console.log(\"\");\n}\n\nexport function info(text: string) {\n  log(chalk.blue(text));\n}\n\nexport function error(text: string) {\n  console.log();\n  log(chalk.red(`[error] ${text}`));\n  console.log();\n}\n\nexport function warn(text: string) {\n  log(chalk.yellow(`[warn] ${text}`));\n}\n\nexport function success(text: string) {\n  log(chalk.green(`[success] ${text}`));\n}\n"
  },
  {
    "path": "packages/cli/src/utils/parse-sandbox/file-error.ts",
    "content": "export default class FileError extends Error {\n  public path: string;\n  public isBinary: boolean;\n\n  /**\n   * Creates an instance of FileError.\n   * @param {string} message\n   * @param {string} path\n   * @param {boolean} [isBinary=false] Whether the error was caused because the file is binary\n   * @memberof FileError\n   */\n  constructor(message: string, path: string, isBinary = false) {\n    super(message);\n\n    this.path = path;\n    this.isBinary = isBinary;\n  }\n}\n"
  },
  {
    "path": "packages/cli/src/utils/parse-sandbox/index.ts",
    "content": "import * as fs from \"fs-extra\";\nimport * as path from \"path\";\nimport { isText, isTooBig } from \"codesandbox-import-utils/lib/is-text\";\nimport { IModule, INormalizedModules } from \"codesandbox-import-util-types\";\n\nimport FileError from \"./file-error\";\n\nconst MAX_FILE_SIZE = 5 * 1024 * 1024;\n\nexport interface IUploads {\n  [path: string]: Buffer;\n}\n\nasync function normalizeFilesInDirectory(\n  p: string,\n  startingPath: string\n): Promise<{\n  errors: FileError[];\n  uploads: IUploads;\n  files: INormalizedModules;\n}> {\n  const entries = await fs.readdir(p);\n  const dirs: string[] = [];\n  const files: string[] = [];\n  const errors: FileError[] = [];\n  let uploads: IUploads = {};\n\n  await Promise.all(\n    entries.map(async (e) => {\n      const absolutePath = path.join(p, e);\n      const stat = await fs.stat(absolutePath);\n\n      if (stat.isDirectory()) {\n        if (e !== \"node_modules\" && e !== \".git\") {\n          dirs.push(absolutePath);\n        }\n      } else {\n        files.push(absolutePath);\n      }\n    })\n  );\n\n  const recursiveDirs: { [path: string]: IModule } = (\n    await Promise.all(\n      dirs.map((d) => normalizeFilesInDirectory(d, startingPath))\n    )\n  ).reduce((prev, next) => {\n    next.errors.forEach((e) => {\n      errors.push(e);\n    });\n\n    uploads = { ...next.uploads, ...uploads };\n\n    return { ...prev, ...next.files };\n  }, {});\n\n  const fileData = (\n    await Promise.all(\n      files.map(async (t) => {\n        const code = await fs.readFile(t);\n\n        const relativePath = t.replace(startingPath + \"/\", \"\");\n        const isBinary = !(await isText(t, code));\n        if (isBinary) {\n          if (code.byteLength > MAX_FILE_SIZE) {\n            errors.push(\n              new FileError(\n                isTooBig(code) ? 
\"Is too big\" : \"Is a binary file\",\n                relativePath,\n                true\n              )\n            );\n            return false;\n          } else {\n            uploads[relativePath] = code;\n            return false;\n          }\n        }\n\n        return { path: relativePath, code: code.toString() };\n      })\n    )\n  ).reduce((prev, next) => {\n    if (next === false) {\n      return prev;\n    }\n\n    return {\n      ...prev,\n      [next.path]: { content: next.code },\n    };\n  }, {});\n\n  return { errors, uploads, files: { ...recursiveDirs, ...fileData } };\n}\n\nconst exists = async (p: string) => {\n  try {\n    const stat = await fs.stat(p);\n    return true;\n  } catch (e) {\n    return false;\n  }\n};\n\n/**\n * This will take a path and return all parameters that are relevant for the call\n * to the CodeSandbox API fir creating a sandbox\n *\n * @export\n * @param {string} path\n */\nexport default async function parseSandbox(resolvedPath: string) {\n  const dirExists = await exists(resolvedPath);\n  if (!dirExists) {\n    throw new Error(`The given path (${resolvedPath}) doesn't exist.`);\n  }\n\n  const fileData = await normalizeFilesInDirectory(resolvedPath, resolvedPath);\n\n  return fileData;\n}\n"
  },
  {
    "path": "packages/cli/src/utils/parse-sandbox/upload-files.ts",
    "content": "import { IUploads } from \".\";\nimport { createUpload } from \"../api\";\nimport { INormalizedModules } from \"codesandbox-import-util-types\";\n\nexport default async function uploadFiles(uploads: IUploads) {\n  const files: INormalizedModules = {};\n\n  const uploadPaths = Object.keys(uploads);\n  for (const uploadPath of uploadPaths) {\n    const buffer = uploads[uploadPath];\n\n    const res: { url: string } = await createUpload(uploadPath, buffer);\n\n    files[uploadPath] = {\n      content: res.url,\n      isBinary: true,\n    };\n  }\n\n  return files;\n}\n"
  },
  {
    "path": "packages/cli/src/utils/url.ts",
    "content": "import { IS_STAGING } from \"./env\";\n\nexport const BASE_URL = IS_STAGING\n  ? \"https://codesandbox.stream\"\n  : \"https://codesandbox.io\";\n\nexport const CREATE_SANDBOX_URL = BASE_URL + \"/api/v1/sandboxes\";\nexport const CREATE_UPLOAD_URL =\n  BASE_URL + \"/api/v1/users/current_user/uploads\";\nexport const GET_USER_URL = BASE_URL + \"/api/v1/users/current\";\nexport const LOGIN_URL = BASE_URL + \"/cli/login\";\n\nconst VERIFY_USER_TOKEN_URL = BASE_URL + \"/api/v1/auth/verify/\";\nexport const verifyUserTokenUrl = (token: string) =>\n  VERIFY_USER_TOKEN_URL + token;\n\nexport const createSandboxUrl = (sandbox: { id: string }) =>\n  BASE_URL + \"/s/\" + sandbox.id;\n"
  },
  {
    "path": "packages/cli/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    /* Basic Options */\n    \"target\": \"es3\" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,\n    \"module\": \"commonjs\" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,\n    \"lib\": [\n      \"es2015\",\n      \"es6\",\n      \"dom\"\n    ] /* Specify library files to be included in the compilation:  */,\n    // \"allowJs\": true,                       /* Allow javascript files to be compiled. */\n    // \"checkJs\": true,                       /* Report errors in .js files. */\n    // \"jsx\": \"preserve\",                     /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */\n    \"declaration\": true /* Generates corresponding '.d.ts' file. */,\n    \"sourceMap\": true /* Generates corresponding '.map' file. */,\n    // \"outFile\": \"./\",                       /* Concatenate and emit output to single file. */\n    \"outDir\": \"./lib\" /* Redirect output structure to the directory. */,\n    // \"rootDir\": \"./\",                       /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */\n    // \"removeComments\": true,                /* Do not emit comments to output. */\n    // \"noEmit\": true,                        /* Do not emit outputs. */\n    // \"importHelpers\": true,                 /* Import emit helpers from 'tslib'. */\n    // \"downlevelIteration\": true,            /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */\n    // \"isolatedModules\": true,               /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */\n    /* Strict Type-Checking Options */\n    \"strict\": true /* Enable all strict type-checking options. 
*/,\n    // \"noImplicitAny\": true,                 /* Raise error on expressions and declarations with an implied 'any' type. */\n    // \"strictNullChecks\": true,              /* Enable strict null checks. */\n    // \"noImplicitThis\": true,                /* Raise error on 'this' expressions with an implied 'any' type. */\n    // \"alwaysStrict\": true,                  /* Parse in strict mode and emit \"use strict\" for each source file. */\n    /* Additional Checks */\n    // \"noUnusedLocals\": true,                /* Report errors on unused locals. */\n    // \"noUnusedParameters\": true,            /* Report errors on unused parameters. */\n    // \"noImplicitReturns\": true,             /* Report error when not all code paths in function return a value. */\n    // \"noFallthroughCasesInSwitch\": true,    /* Report errors for fallthrough cases in switch statement. */\n    /* Module Resolution Options */\n    // \"moduleResolution\": \"node\",            /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */\n    // \"baseUrl\": \"./\",                       /* Base directory to resolve non-absolute module names. */\n    // \"paths\": {},                           /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */\n    // \"rootDirs\": [],                        /* List of root folders whose combined content represents the structure of the project at runtime. */\n    // \"typeRoots\": []                        /* List of folders to include type definitions from. */\n    // \"types\": []                            /* Type declaration files to be included in compilation. */\n    // \"allowSyntheticDefaultImports\": true,  /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. 
*/\n    /* Source Map Options */\n    // \"sourceRoot\": \"./\",                    /* Specify the location where debugger should locate TypeScript files instead of source locations. */\n    // \"mapRoot\": \"./\",                       /* Specify the location where debugger should locate map files instead of generated locations. */\n    // \"inlineSourceMap\": true,               /* Emit a single file with source maps instead of having a separate file. */\n    // \"inlineSources\": true,                 /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */\n    /* Experimental Options */\n    // \"experimentalDecorators\": true,        /* Enables experimental support for ES7 decorators. */\n    // \"emitDecoratorMetadata\": true,         /* Enables experimental support for emitting type metadata for decorators. */\n    \"skipLibCheck\": true,\n    \"useUnknownInCatchVariables\": false\n  },\n  \"include\": [\"src/**/*.ts\", \"node_modules/**/*.ts\", \"typings/**/*.ts\"],\n  \"exclude\": [\"__tests__\", \"build\", \"**/*.test.ts\", \"temp\"]\n}\n"
  },
  {
    "path": "packages/cli/tslint.json",
    "content": "{\n  \"defaultSeverity\": \"error\",\n  \"extends\": [\"tslint:latest\", \"tslint-config-prettier\"],\n  \"jsRules\": {},\n  \"rules\": {\n    \"no-console\": [false],\n    \"no-implicit-dependencies\": false,\n    \"no-submodule-imports\": false,\n    \"ordered-imports\": false,\n    \"object-literal-sort-keys\": false\n  },\n  \"rulesDirectory\": []\n}\n"
  },
  {
    "path": "packages/cli/typings/extensions/json.d.ts",
    "content": "declare module \"*.json\" {\n  const package: {\n    name: string;\n    version: string;\n  };\n  export = package;\n}\n"
  },
  {
    "path": "packages/git-extractor/.gitignore",
    "content": "config/production.json\n"
  },
  {
    "path": "packages/git-extractor/config/.gitkeep",
    "content": ""
  },
  {
    "path": "packages/git-extractor/package.json",
    "content": "{\n  \"name\": \"git-converter\",\n  \"version\": \"2.2.3\",\n  \"description\": \"\",\n  \"main\": \"index.js\",\n  \"private\": true,\n  \"scripts\": {\n    \"build\": \"rimraf dist && yarn compile\",\n    \"compile\": \"tsc\",\n    \"start\": \"node dist\",\n    \"dev\": \"nodemon --watch 'src/**/*.ts' --exec 'ts-node' src/index.ts\",\n    \"test\": \"jest\",\n    \"test:watch\": \"jest --watch\"\n  },\n  \"author\": \"\",\n  \"license\": \"ISC\",\n  \"devDependencies\": {\n    \"@types/acorn\": \"^4.0.2\",\n    \"@types/base-64\": \"^0.1.3\",\n    \"@types/debug\": \"^0.0.29\",\n    \"@types/humps\": \"^1.1.2\",\n    \"@types/jest\": \"^19.2.4\",\n    \"@types/jszip\": \"^3.4.1\",\n    \"@types/koa\": \"^2.0.39\",\n    \"@types/koa-bodyparser\": \"^3.0.25\",\n    \"@types/koa__router\": \"^8.0.3\",\n    \"@types/lodash.pickby\": \"^4.6.2\",\n    \"@types/lru-cache\": \"^4.1.0\",\n    \"@types/node\": \"^14\",\n    \"@types/node-fetch\": \"^1.6.7\",\n    \"@types/redis\": \"^2.6.0\",\n    \"@types/shortid\": \"^0.0.29\",\n    \"eslint\": \"^3.12.2\",\n    \"eslint-config-airbnb\": \"^13.0.0\",\n    \"eslint-plugin-import\": \"2.1.0\",\n    \"eslint-plugin-jsx-a11y\": \"2.2.3\",\n    \"eslint-plugin-react\": \"^6.8.0\",\n    \"nodemon\": \"^2.0.4\",\n    \"ts-node\": \"^8.10.1\"\n  },\n  \"dependencies\": {\n    \"@appsignal/nodejs\": \"^2.0.25\",\n    \"@babel/core\": \"^7.23.2\",\n    \"@babel/traverse\": \"^7.23.2\",\n    \"@koa/router\": \"^9.4.0\",\n    \"@sentry/node\": \"5.13.2\",\n    \"acorn\": \"^5.1.2\",\n    \"acorn-dynamic-import\": \"^2.0.2\",\n    \"acorn-jsx\": \"^4.0.1\",\n    \"acorn-object-spread\": \"LivelyKernel/acorn-object-spread\",\n    \"axios\": \"^1.6.0\",\n    \"babel-plugin-dynamic-import-node\": \"^1.1.0\",\n    \"babel-plugin-syntax-dynamic-import\": \"^6.18.0\",\n    \"babel-plugin-transform-async-to-generator\": \"^6.24.1\",\n    \"babel-plugin-transform-class-properties\": \"^6.24.1\",\n    
\"babel-plugin-transform-decorators-legacy\": \"^1.3.4\",\n    \"babel-plugin-transform-object-rest-spread\": \"^6.26.0\",\n    \"babel-preset-env\": \"^1.6.0\",\n    \"babel-preset-react\": \"^6.24.1\",\n    \"babel-traverse\": \"^6.26.0\",\n    \"babylon\": \"^6.18.0\",\n    \"base-64\": \"^0.1.0\",\n    \"debug\": \"^2.6.8\",\n    \"envfile\": \"^7.1.0\",\n    \"gitignore-parser\": \"^0.0.2\",\n    \"humps\": \"CompuIves/humps\",\n    \"istextorbinary\": \"^6.0.0\",\n    \"jszip\": \"^3.5.0\",\n    \"koa\": \"^2.2.0\",\n    \"koa-bodyparser\": \"^4.2.0\",\n    \"lodash.pickby\": \"^4.6.0\",\n    \"lru-cache\": \"^4.1.1\",\n    \"node-fetch\": \"^2.6.7\",\n    \"redis\": \"^3.1.1\",\n    \"rimraf\": \"^2.6.1\",\n    \"shortid\": \"^2.2.8\"\n  },\n  \"jest\": {\n    \"transform\": {\n      \".(ts|tsx)\": \"<rootDir>../../node_modules/ts-jest/preprocessor.js\"\n    },\n    \"testEnvironment\": \"node\",\n    \"moduleFileExtensions\": [\n      \"ts\",\n      \"tsx\",\n      \"js\",\n      \"json\"\n    ],\n    \"testPathIgnorePatterns\": [\n      \"<rootDir>/node_modules/\",\n      \"<rootDir>/dist/\"\n    ],\n    \"testRegex\": \"(/__tests__/.*|\\\\.(test|spec))\\\\.(ts|tsx|js)$\"\n  }\n}\n"
  },
  {
    "path": "packages/git-extractor/src/index.ts",
    "content": "import * as Sentry from \"@sentry/node\";\nimport * as Koa from \"koa\";\nimport * as bodyParser from \"koa-bodyparser\";\nimport * as Router from \"@koa/router\";\n\nimport camelize from \"./middleware/camelize\";\nimport decamelize from \"./middleware/decamelize\";\nimport errorHandler from \"./middleware/error-handler\";\nimport appSignalMiddleware from \"./middleware/appsignal\";\n// MIDDLEWARE\nimport logger from \"./middleware/logger\";\nimport notFound from \"./middleware/not-found\";\nimport * as define from \"./routes/define\";\n// ROUTES\nimport * as github from \"./routes/github\";\nimport { appsignal } from \"./utils/appsignal\";\nimport log from \"./utils/log\";\n\nSentry.init({\n  dsn: \"https://4917ce43c4ca42a1acb85b2843b79c6b@sentry.io/4377691\",\n});\n\nconst DEFAULT_PORT = process.env.PORT || 2000;\nconst app = new Koa();\nconst router = new Router();\n\napp.use(errorHandler);\napp.use(logger);\napp.use(bodyParser({ jsonLimit: \"50mb\" }));\napp.use(camelize);\napp.use(decamelize);\napp.use(notFound);\napp.use(appSignalMiddleware);\n\nrouter\n  .get(\n    \"/git/github/data/:username/:repo/:branch*/commit/:commitSha/path/:path*\",\n    github.data\n  )\n  .get(\"/git/github/rights/:username/:repo\", github.getRights)\n  .get(\"/git/github/info/:username/:repo/tree/:branch/:path*\", github.info) // allow tree urls\n  .get(\"/git/github/info/:username/:repo/blob/:branch/:path*\", github.info) // allow blob urls\n  .get(\"/git/github/info/:username/:repo/commit/:branch\", github.info) // allow commit urls\n  .get(\"/git/github/info/:username/:repo\", github.info) // For when tree isn't in path (root path)\n  .get(\"/git/github/info/:username/:repo/pull/:pull\", github.pullInfo) // allow pull urls\n  .post(\"/git/github/compare/:username/:repo\", github.compare) // Compare changes between branches and commits\n  // Push\n  .post(\n    \"/git/github/commit/:username/:repo/:branch*/path/:path*\",\n    github.commit\n  )\n  
.post(\"/git/github/pr/:username/:repo/:branch*/path/:path*\", github.pr)\n  .post(\"/git/github/repo/:username/:repo\", github.repo)\n  .post(\"/define\", define.define);\n\napp.use(router.routes()).use(router.allowedMethods());\n\nlog(`Listening on ${DEFAULT_PORT}`);\napp.listen(DEFAULT_PORT);\n\nconsole.log(\n  JSON.stringify({\n    message: `AppSignal ${appsignal.VERSION}, active: ${appsignal.isActive}`,\n  })\n);\n\napp.on(\"error\", (err, ctx) => {\n  const span = appsignal.tracer().currentSpan();\n  if (span) {\n    span.addError(err);\n  }\n\n  Sentry.withScope(function (scope) {\n    scope.addEventProcessor(function (event) {\n      return Sentry.Handlers.parseRequest(event, ctx.request);\n    });\n    Sentry.captureException(err);\n  });\n});\n"
  },
  {
    "path": "packages/git-extractor/src/middleware/appsignal.ts",
    "content": "import { Context } from \"koa\";\nimport { appsignal } from \"../utils/appsignal\";\n\nconst appSignal = async (ctx: Context, next: () => Promise<any>) => {\n  const tracer = appsignal.tracer();\n  const rootSpan = tracer.currentSpan();\n\n  if (!rootSpan) {\n    return next();\n  }\n\n  const { req, res } = ctx;\n  tracer.wrapEmitter(req);\n  tracer.wrapEmitter(res);\n\n  // identifies the span in the stacked graphs\n  rootSpan.setCategory(\"process_request.koa\");\n\n  return tracer.withSpan(rootSpan, async (span) => {\n    try {\n      await next();\n    } finally {\n      const { method, params = {}, query = {}, routerPath } = ctx;\n\n      // set route params (if parsed by koa correctly)\n      span.setSampleData(\"params\", { ...params, ...query });\n      if (routerPath) {\n        span.setName(`${method} ${routerPath}`);\n      }\n\n      span.close();\n    }\n  });\n};\n\nexport default appSignal;\n"
  },
  {
    "path": "packages/git-extractor/src/middleware/camelize.ts",
    "content": "import { Context } from \"koa\";\nimport { camelizeKeys } from \"humps\";\n\nconst camelizeMiddleware = async (ctx: Context, next: () => Promise<any>) => {\n  if (ctx.request.body) {\n    const originalBody = ctx.request.body;\n    ctx.request.body = camelizeKeys(ctx.request.body);\n\n    // Don't camelize files object, because there will be paths\n    // with underscores and it's user input.\n    if (ctx.request.body.files) {\n      ctx.request.body.files = originalBody.files;\n    }\n  }\n\n  await next();\n};\n\nexport default camelizeMiddleware;\n"
  },
  {
    "path": "packages/git-extractor/src/middleware/decamelize.ts",
    "content": "import { Context } from \"koa\";\nimport { decamelizeKeys } from \"humps\";\n\nconst decamelizeMiddleware = async (ctx: Context, next: () => Promise<any>) => {\n  await next();\n\n  ctx.body = decamelizeKeys(ctx.body as object[]);\n};\n\nexport default decamelizeMiddleware;\n"
  },
  {
    "path": "packages/git-extractor/src/middleware/error-handler.ts",
    "content": "import { Context } from \"koa\";\n\n// Error handler\nconst errorHandler = async (ctx: Context, next: () => Promise<any>) => {\n  try {\n    await next();\n  } catch (e) {\n    ctx.status = e.status || (e.response && e.response.status) || 500;\n    ctx.body = {\n      error:\n        e.response && e.response.data ? e.response.data.message : e.message,\n    };\n\n    if (e.response && e.response.data) {\n      console.log(\"ERROR: \" + e.response.data.message);\n    }\n\n    ctx.app.emit(\"error\", e, ctx);\n  }\n};\n\nexport default errorHandler;\n"
  },
  {
    "path": "packages/git-extractor/src/middleware/logger.ts",
    "content": "import { Middleware, Context } from \"koa\";\n\ninterface ILogParams {\n  method: string;\n  url: string;\n  duration: number;\n  error?: string;\n}\n\nfunction log({ method, url, duration, error }: ILogParams) {\n  const log = {\n    method,\n    path: url,\n    duration,\n    error,\n  };\n  console.log(JSON.stringify(log));\n}\n\nconst logger = async (ctx: Context, next: () => Promise<any>) => {\n  const start = +new Date();\n\n  try {\n    await next();\n  } catch (e) {\n    const ms = +new Date() - start;\n    log({ method: ctx.method, duration: ms, error: e.message, url: ctx.url });\n    throw e;\n  }\n\n  const ms = +new Date() - start;\n  log({ method: ctx.method, duration: ms, url: ctx.url });\n};\n\nexport default logger;\n"
  },
  {
    "path": "packages/git-extractor/src/middleware/not-found.ts",
    "content": "import { Context } from \"koa\";\n\n// Not found handler\nconst notFound = async (ctx: Context, next: () => Promise<any>) => {\n  await next();\n\n  if (ctx.status === 404) {\n    ctx.body = { error: \"Page not found\" };\n  }\n};\n\nexport default notFound;\n"
  },
  {
    "path": "packages/git-extractor/src/routes/define.test.ts",
    "content": "import { createSandboxFromDefine } from \"./define\";\n\nit(\"can infer title and description\", async () => {\n  const payload = [\n    {\n      path: \"package.json\",\n      content: JSON.stringify({\n        title: \"test\",\n        description: \"test description\",\n        dependencies: {},\n      }),\n      isBinary: false,\n    },\n  ];\n\n  const result = await createSandboxFromDefine(payload);\n\n  expect(result.title).toBe(\"test\");\n  expect(result.description).toBe(\"test description\");\n});\n\nit(\"works with leading slashes\", async () => {\n  const payload = [\n    {\n      path: \"/package.json\",\n      content: JSON.stringify({\n        title: \"test\",\n        description: \"test description\",\n        dependencies: {},\n      }),\n      isBinary: false,\n    },\n  ];\n\n  const result = await createSandboxFromDefine(payload);\n\n  expect(result.title).toBe(\"test\");\n  expect(result.description).toBe(\"test description\");\n});\n"
  },
  {
    "path": "packages/git-extractor/src/routes/define.ts",
    "content": "import { Context } from \"koa\";\nimport createSandbox from \"codesandbox-import-utils/lib/create-sandbox\";\nimport {\n  INormalizedModules,\n  IModule,\n  ITemplate,\n} from \"codesandbox-import-util-types\";\n\nexport const createSandboxFromDefine = async (\n  files: Array<IModule & { path: string }>\n) => {\n  const normalizedFiles: INormalizedModules = files\n    .map((file) => {\n      if (file.path[0] === \"/\") {\n        // Remove the leading slash\n        const p = file.path.split(\"\");\n        p.shift();\n        file.path = p.join(\"\");\n      }\n\n      if (typeof file.content === \"object\") {\n        file.content = JSON.stringify(file.content, null, 2);\n      }\n\n      return file;\n    })\n    .reduce(\n      (total: INormalizedModules, next) => ({\n        ...total,\n        [next.path]: next,\n      }),\n      {}\n    );\n\n  try {\n    const pkg = normalizedFiles[\"/package.json\"];\n\n    if (pkg && pkg.type === \"file\") {\n      const parsed = JSON.parse(pkg.content);\n      console.log(\n        `Creating defined sandbox with ${JSON.stringify(\n          parsed.dependencies\n        )} deps, ${JSON.stringify(parsed.devDependencies)} devDeps.`\n      );\n    }\n  } catch (e) {\n    /* nothing */\n  }\n\n  return createSandbox(normalizedFiles);\n};\n\nexport const define = async (ctx: Context, _next: () => Promise<any>) => {\n  const { files, template } = ctx.request.body;\n\n  const sandbox = await createSandboxFromDefine(files);\n\n  if (template) {\n    sandbox.template = template as ITemplate;\n  }\n\n  ctx.body = {\n    sandbox,\n  };\n};\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/api.ts",
    "content": "import * as Sentry from \"@sentry/node\";\nimport axios, { AxiosPromise, AxiosRequestConfig } from \"axios\";\nimport * as zip from \"jszip\";\nimport * as LRU from \"lru-cache\";\nimport fetch from \"node-fetch\";\nimport { encode } from \"base-64\";\nimport { IGitInfo, ITree } from \"./push\";\nimport { appsignal } from \"../../utils/appsignal\";\n\nconst API_URL = \"https://api.github.com\";\nconst REPO_BASE_URL = API_URL + \"/repos\";\n\nconst GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID;\nconst GITHUB_CLIENT_SECRET = process.env.GITHUB_CLIENT_SECRET;\n\nconst NOT_FOUND_MESSAGE =\n  \"Could not find the specified repository or directory\";\n\nfunction buildRepoApiUrl(username: string, repo: string) {\n  return `${REPO_BASE_URL}/${username}/${repo}`;\n}\n\nfunction buildPullApiUrl(username: string, repo: string, pull: number) {\n  return `${buildRepoApiUrl(username, repo)}/pulls/${pull}`;\n}\n\nfunction buildCommitApiUrl(username: string, repo: string, commitSha: string) {\n  return `${REPO_BASE_URL}/${username}/${repo}/commits/${commitSha}`;\n}\n\nfunction buildTreesApiUrl(username: string, repo: string, treeSha: string) {\n  return `${REPO_BASE_URL}/${username}/${repo}/git/trees/${treeSha}`;\n}\n\nfunction buildContentsApiUrl(username: string, repo: string, path: string) {\n  return `${REPO_BASE_URL}/${username}/${repo}/contents/${path}`;\n}\n\nfunction requestAxios(\n  requestName: string,\n  requestObject: AxiosRequestConfig\n): AxiosPromise {\n  const tracer = appsignal.tracer();\n  const span = tracer.createSpan(undefined, tracer.currentSpan());\n  return tracer.withSpan(span, (span) => {\n    span.setCategory(\"request-api.github\");\n    span.setName(requestName);\n    const meter = appsignal.metrics();\n\n    const snakeCaseRequestName = requestName.toLowerCase().replace(/\\s/g, \"_\");\n    meter.incrementCounter(`github_request_${snakeCaseRequestName}`, 1);\n\n    // To keep track of how many binary files we are actually trying to 
request SHAs for\n    if (\n      snakeCaseRequestName === \"checking_remaining_rate_limit\" &&\n      requestObject?.params?.numberOfRequests\n    ) {\n      meter.incrementCounter(\n        \"number_of_binary_files\",\n        requestObject.params.numberOfRequests\n      );\n    }\n\n    if (requestObject.auth) {\n      // In the case we're not using the user token, let's log that as well!\n      meter.incrementCounter(\n        `github_unauthorized_request_${snakeCaseRequestName}`,\n        1\n      );\n    }\n\n    return axios(requestObject)\n      .then((res) => {\n        span.close();\n        return res;\n      })\n      .catch((e) => {\n        span.addError(e);\n        span.close();\n\n        return Promise.reject(e);\n      });\n  });\n}\n\nfunction buildCompareApiUrl(\n  username: string,\n  repo: string,\n  baseRef: string,\n  headRef: string\n) {\n  return `${buildRepoApiUrl(username, repo)}/compare/${baseRef}...${headRef}`;\n}\n\nfunction createAxiosRequestConfig(token?: string): AxiosRequestConfig {\n  const Accept = \"application/vnd.github.v3+json\";\n  return token\n    ? 
{\n        headers: { Accept, Authorization: `Bearer ${token}` },\n      }\n    : {\n        auth: {\n          username: GITHUB_CLIENT_ID!,\n          password: GITHUB_CLIENT_SECRET!,\n        },\n        headers: { Accept },\n      };\n}\n\nfunction buildContentsUrl(\n  username: string,\n  repo: string,\n  branch: string,\n  path: string\n) {\n  return `${buildRepoApiUrl(username, repo)}/contents/${path}?ref=${branch}`;\n}\n\nfunction buildCommitsUrl(\n  username: string,\n  repo: string,\n  branch: string,\n  path: string\n) {\n  return `${buildRepoApiUrl(username, repo)}/commits/${branch}?path=${path}`;\n}\n\nfunction buildCommitsByPathUrl(\n  username: string,\n  repo: string,\n  branch: string,\n  path: string\n) {\n  return `${buildRepoApiUrl(\n    username,\n    repo\n  )}/commits?sha=${branch}&path=${path}`;\n}\n\ninterface IRepoResponse {\n  id: number;\n  node_id: string;\n  name: string;\n  full_name: string;\n  private: boolean;\n  default_branch: string;\n}\n\ninterface ICompareResponse {\n  files: Array<{\n    sha: string;\n    filename: string;\n    status: \"added\" | \"deleted\";\n    additions: number;\n    deletions: number;\n    changes: number;\n    contents_url: string;\n    patch?: string;\n  }>;\n  base_commit: {\n    sha: string;\n  };\n  merge_base_commit: {\n    sha: string;\n  };\n  commits: Array<{ sha: string }>;\n}\n\ninterface IContentResponse {\n  content: string;\n  encoding: \"base64\" | \"utf-8\" | \"binary\";\n  sha: string;\n}\n\ninterface ICommitResponse {\n  commit: {\n    tree: {\n      sha: string;\n    };\n  };\n}\n\ninterface IPrResponse {\n  number: number;\n  repo: string;\n  username: string;\n  branch: string;\n  state: string;\n  merged: boolean;\n  mergeable: boolean;\n  mergeable_state: string;\n  commitSha: string;\n  baseCommitSha: string;\n  rebaseable: boolean;\n  commits: number;\n  additions: number;\n  deletions: number;\n  changed_files: number;\n}\n\ninterface IDeleteContentResponse {\n  commit: {\n    
sha: string;\n  };\n}\n\nexport async function getComparison(\n  username: string,\n  repo: string,\n  baseRef: string,\n  headRef: string,\n  token: string\n) {\n  const url = buildCompareApiUrl(username, repo, baseRef, headRef);\n\n  const response: { data: ICompareResponse } = await requestAxios(\n    \"Get Comparison\",\n    {\n      url: encodeURI(url),\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data;\n}\n\nexport async function getContent(url: string, token: string) {\n  const response: { data: IContentResponse } = await requestAxios(\n    \"Get Content\",\n    {\n      url: encodeURI(url),\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data;\n}\n\ntype RepoInfoCache = {\n  etag: string;\n  response: IRepoResponse;\n};\nconst repoInfoCache = new LRU<string, RepoInfoCache>({\n  max: 50 * 1024 * 1024, // 50 MB\n});\n\nexport async function getRepo(username: string, repo: string, token?: string) {\n  const url = buildRepoApiUrl(username, repo);\n  const cacheIdentifier = [username, repo, token].filter(Boolean).join(\"::\");\n  let etagCache: RepoInfoCache | undefined = repoInfoCache.get(cacheIdentifier);\n\n  const config = {\n    url: encodeURI(url),\n    ...createAxiosRequestConfig(token),\n  };\n\n  if (etagCache) {\n    config.headers = config.headers = {};\n    config.headers[\"If-None-Match\"] = etagCache.etag;\n    config.validateStatus = function (status: number) {\n      // Axios sees 304 (Not Modified) as an error. 
We don't want that.\n      return status < 400; // Reject only if the status code is greater than or equal to 400\n    };\n  }\n\n  const response: {\n    data: IRepoResponse;\n    status: number;\n    headers: any;\n  } = await requestAxios(\"Get Repo\", config);\n\n  if (response.status === 304) {\n    return etagCache!.response;\n  } else {\n    const etag = response.headers.etag;\n    repoInfoCache.set(cacheIdentifier, {\n      etag,\n      response: response.data,\n    });\n  }\n\n  return response.data;\n}\n\nexport async function getTreeWithDeletedFiles(\n  username: string,\n  repo: string,\n  treeSha: string,\n  deletedFiles: string[],\n  token: string,\n  path = []\n) {\n  async function fetchTree(sha: string) {\n    const url = buildTreesApiUrl(username, repo, sha);\n\n    const response: { data: ITreeResponse } = await requestAxios(\"Get Tree\", {\n      url: encodeURI(url),\n      ...createAxiosRequestConfig(token),\n    });\n\n    return response.data.tree;\n  }\n\n  let tree = await fetchTree(treeSha);\n\n  return deletedFiles.reduce(\n    (aggr, file) =>\n      aggr.then(async (tree) => {\n        const parts = file.split(\"/\");\n        parts.pop();\n        const dirs = parts.reduce<string[]>((aggr, part, index) => {\n          return aggr.concat(\n            aggr[index - 1] ? 
aggr[index - 1] + \"/\" + part : part\n          );\n        }, []);\n\n        const newTree = await dirs.reduce(\n          (subaggr, dir) =>\n            subaggr.then(async (tree) => {\n              const treeIndex = tree.findIndex(\n                (item) => item.type === \"tree\" && item.path === dir\n              );\n\n              if (treeIndex >= 0) {\n                const nestedTree = await fetchTree(tree[treeIndex].sha);\n                const newTree = tree.concat(\n                  nestedTree.map((item) => ({\n                    ...item,\n                    path: dir + \"/\" + item.path,\n                  }))\n                );\n                newTree.splice(treeIndex, 1);\n\n                return newTree;\n              }\n\n              return tree;\n            }),\n          Promise.resolve(tree)\n        );\n\n        return newTree.filter((item) => item.path !== file);\n      }),\n    Promise.resolve(tree)\n  );\n}\n\nexport async function getCommitTreeSha(\n  username: string,\n  repo: string,\n  commitSha: string,\n  token: string\n) {\n  const url = buildCommitApiUrl(username, repo, commitSha);\n\n  const response: { data: ICommitResponse } = await requestAxios(\n    \"Get CommitTreeSha\",\n    {\n      url: encodeURI(url),\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data.commit.tree.sha;\n}\n\nexport async function getLatestCommitShaOfFile(\n  username: string,\n  repo: string,\n  branch: string,\n  path: string,\n  token?: string\n): Promise<string | undefined> {\n  const url = buildCommitsByPathUrl(username, repo, branch, path);\n  const response: { data: { sha: string }[] } = await requestAxios(\n    \"Get Commits of File\",\n    {\n      url: encodeURI(url),\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  if (response.data[0]) {\n    return response.data[0].sha;\n  }\n\n  return undefined;\n}\n\nexport async function isRepoPrivate(\n  username: string,\n  repo: string,\n  token: 
string\n) {\n  const data = await getRepo(username, repo, token);\n\n  return data.private;\n}\n\ninterface RightsResponse {\n  permissions: {\n    admin: boolean;\n    push: boolean;\n    pull: boolean;\n  };\n}\n\n/**\n * Fetch the permissions of a user on a specific repository.\n */\nexport async function fetchRights(\n  username: string,\n  repo: string,\n  token?: string\n): Promise<\"admin\" | \"write\" | \"read\" | \"none\"> {\n  const url = buildRepoApiUrl(username, repo);\n\n  try {\n    const response: { data: RightsResponse } = await requestAxios(\n      \"Get Rights\",\n      {\n        url: encodeURI(url),\n        ...createAxiosRequestConfig(token),\n      }\n    );\n\n    // No token\n    if (!response.data.permissions) {\n      return \"none\";\n    }\n\n    if (response.data.permissions.admin) {\n      return \"admin\";\n    }\n\n    if (response.data.permissions.push) {\n      return \"write\";\n    }\n\n    return \"read\";\n  } catch (e) {\n    if (\n      e.response &&\n      (e.response.status === 403 || e.response.status === 401)\n    ) {\n      return \"none\";\n    } else {\n      throw e;\n    }\n  }\n}\n\ninterface ITreeResponse {\n  sha: string;\n  tree: ITree;\n  truncated: boolean;\n  url: string;\n}\n\ninterface IBlobResponse {\n  url: string;\n  sha: string;\n}\n\nexport async function createPr(\n  base: {\n    username: string;\n    repo: string;\n    branch: string;\n  },\n  head: {\n    username: string;\n    repo: string;\n    branch: string;\n  },\n  title: string,\n  body: string,\n  token: string\n): Promise<IPrResponse> {\n  const { data } = await requestAxios(\"Create PR\", {\n    method: \"post\",\n    url: encodeURI(`${buildRepoApiUrl(base.username, base.repo)}/pulls`),\n    data: {\n      base: base.branch,\n      head: `${base.username === head.username ? 
\"\" : head.username + \":\"}${\n        head.branch\n      }`,\n      title,\n      body,\n      maintainer_can_modify: true,\n    },\n    ...createAxiosRequestConfig(token),\n  });\n\n  return {\n    number: data.number,\n    repo: data.head.repo.name,\n    username: data.head.repo.owner.login,\n    commitSha: data.head.sha,\n    branch: data.head.ref,\n    merged: data.merged,\n    state: data.state,\n    mergeable: data.mergeable,\n    mergeable_state: data.mergeable_state,\n    rebaseable: data.rebaseable,\n    additions: data.additions,\n    changed_files: data.changed_files,\n    commits: data.commits,\n    baseCommitSha: data.base.sha,\n    deletions: data.deletions,\n  };\n}\n\nexport async function createBlob(\n  username: string,\n  repo: string,\n  content: string,\n  encoding: \"utf-8\" | \"base64\",\n  token: string\n) {\n  const response: { data: IBlobResponse } = await requestAxios(\"Create Blob\", {\n    method: \"post\",\n    url: encodeURI(`${buildRepoApiUrl(username, repo)}/git/blobs`),\n    data: { content: content, encoding },\n    ...createAxiosRequestConfig(token),\n  });\n\n  return response.data;\n}\n\ninterface ICreateTreeResponse {\n  sha: string;\n  url: string;\n  tree: ITree;\n}\n\nexport async function createTree(\n  username: string,\n  repo: string,\n  tree: ITree,\n  baseTreeSha: string | null,\n  token: string\n) {\n  const response: { data: ICreateTreeResponse } = await requestAxios(\n    \"Create Tree\",\n    {\n      method: \"post\",\n      url: encodeURI(`${buildRepoApiUrl(username, repo)}/git/trees`),\n      data: { base_tree: baseTreeSha, tree },\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data;\n}\n\ninterface ICreateCommitResponse {\n  sha: string;\n  url: string;\n  author: {\n    date: string;\n    name: string;\n    email: string;\n  };\n  committer: {\n    date: string;\n    name: string;\n    email: string;\n  };\n  message: string;\n}\n\n/**\n * Create a commit from the given tree\n 
*/\nexport async function createCommit(\n  username: string,\n  repo: string,\n  treeSha: string,\n  parentCommitShas: string[],\n  message: string,\n  token: string\n) {\n  const response: { data: ICreateCommitResponse } = await requestAxios(\n    \"Create Commit\",\n    {\n      method: \"post\",\n      url: encodeURI(`${buildRepoApiUrl(username, repo)}/git/commits`),\n      data: { tree: treeSha, message, parents: parentCommitShas },\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data;\n}\n\ninterface IUpdateReferenceResponse {\n  ref: string;\n  url: string;\n}\n\nexport async function updateReference(\n  username: string,\n  repo: string,\n  branch: string,\n  commitSha: string,\n  token: string\n) {\n  const response: { data: IUpdateReferenceResponse } = await requestAxios(\n    \"Update Reference\",\n    {\n      method: \"patch\",\n      url: encodeURI(\n        `${buildRepoApiUrl(username, repo)}/git/refs/heads/${branch}`\n      ),\n      data: { sha: commitSha, force: true },\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data;\n}\n\ninterface ICreateReferenceResponse {\n  ref: string;\n  url: string;\n  object: {\n    type: string;\n    sha: string;\n    url: string;\n  };\n}\n\nexport async function createReference(\n  username: string,\n  repo: string,\n  branch: string,\n  refSha: string,\n  token: string\n) {\n  const response: {\n    data: ICreateReferenceResponse;\n  } = await requestAxios(\"Create Reference\", {\n    method: \"post\",\n    url: encodeURI(`${buildRepoApiUrl(username, repo)}/git/refs`),\n    data: { ref: `refs/heads/${branch}`, sha: refSha },\n    ...createAxiosRequestConfig(token),\n  });\n\n  return response.data;\n}\n\ninterface ICreateForkResponse {\n  name: string;\n  full_name: string;\n  description: string;\n  private: boolean;\n  fork: boolean;\n}\n\nexport async function createFork(\n  username: string,\n  repo: string,\n  token: string\n) {\n  const response: { 
data: ICreateForkResponse } = await requestAxios(\n    \"Create Fork\",\n    {\n      method: \"post\",\n      url: encodeURI(`${buildRepoApiUrl(username, repo)}/forks`),\n      data: {},\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data;\n}\n\ninterface ICreateRepoResponse {\n  name: string;\n  full_name: string;\n  description: string;\n  private: false;\n  fork: false;\n  url: string;\n  default_branch: string;\n}\n\nexport async function getDefaultBranch(\n  username: string,\n  repo: string,\n  token?: string\n) {\n  const data = await getRepo(username, repo, token);\n\n  return data.default_branch;\n}\n\nexport async function createRepo(\n  username: string,\n  repo: string,\n  token: string,\n  privateRepo: boolean = false\n) {\n  const repoExists = await doesRepoExist(username, repo, token);\n  if (repoExists) {\n    const error = new Error(\n      `The repository ${username}/${repo} already exists.`\n    );\n    // @ts-ignore\n    error.status = 422;\n\n    throw error;\n  }\n\n  const response: { data: ICreateRepoResponse } = await requestAxios(\n    \"Create Repo\",\n    {\n      method: \"post\",\n      url: encodeURI(`${API_URL}/user/repos`),\n      data: {\n        name: repo,\n        description: \"Created with CodeSandbox\",\n        homepage: `https://codesandbox.io/p/github/${username}/${repo}`,\n        auto_init: true,\n        private: privateRepo,\n      },\n      ...createAxiosRequestConfig(token),\n    }\n  );\n\n  return response.data;\n}\n\n/**\n * Check if repository exists\n */\nexport async function doesRepoExist(\n  username: string,\n  repo: string,\n  userToken?: string\n) {\n  try {\n    await requestAxios(\"Repo Exists\", {\n      method: \"get\",\n      url: encodeURI(buildRepoApiUrl(username, repo)),\n      ...createAxiosRequestConfig(userToken),\n    });\n\n    return true;\n  } catch (e) {\n    if (e.response && e.response.status === 404) {\n      return false;\n    }\n\n    throw e;\n  
}\n}\n\ninterface CommitResponse {\n  commitSha: string;\n  username: string;\n  repo: string;\n  branch: string;\n  path: string;\n}\n\nconst shaCache = new LRU({\n  max: 500,\n  maxAge: 1000 * 5, // 5 seconds\n});\n\nconst etagCache = new LRU<string, { etag: string; sha: string }>({\n  max: 50000,\n});\n\nexport function resetShaCache(gitInfo: IGitInfo) {\n  const { username, repo, branch, path = \"\" } = gitInfo;\n\n  return shaCache.del(username + repo + branch + path);\n}\n\nexport async function fetchRepoInfo(\n  username: string,\n  repo: string,\n  branch: string,\n  path: string = \"\",\n  skipCache: boolean = false,\n  userToken?: string\n): Promise<CommitResponse> {\n  let span;\n  try {\n    const cacheId = username + repo + branch + path;\n    // We cache the latest retrieved sha for a limited time, so we don't spam the\n    // GitHub API for every request\n    let latestSha = shaCache.get(cacheId) as string;\n\n    if (!latestSha || skipCache) {\n      const tracer = appsignal.tracer();\n      span = tracer.createSpan(undefined, tracer.currentSpan());\n      span.setCategory(\"request-api.github\");\n      span.setName(\"GET api.github.com/info\");\n\n      const url = buildCommitsUrl(username, repo, branch, path);\n\n      const headers: { \"If-None-Match\"?: string } = {};\n\n      const etagCacheResponse = etagCache.get(cacheId);\n      if (etagCacheResponse) {\n        // Use an ETag header so duplicate requests don't count towards the limit\n        headers[\"If-None-Match\"] = etagCacheResponse.etag;\n      }\n\n      const defaultConfig = createAxiosRequestConfig(userToken);\n      const response = await requestAxios(\"Get Repo Info\", {\n        url: encodeURI(url),\n        validateStatus: function (status) {\n          // Axios sees 304 (Not Modified) as an error. 
We don't want that.\n          return status < 400; // Reject only if the status code is greater than or equal to 400\n        },\n        ...defaultConfig,\n        headers: {\n          ...defaultConfig.headers,\n          ...headers,\n        },\n      });\n\n      span.setSampleData(\"custom_data\", {\n        etagCacheUsed: response.status === 304 && etagCacheResponse,\n      });\n      const meter = appsignal.metrics();\n      if (response.status === 304 && etagCacheResponse) {\n        meter.incrementCounter(\"github_cache_hit\", 1);\n\n        latestSha = etagCacheResponse.sha;\n      } else {\n        meter.incrementCounter(\"github_cache_miss\", 1);\n\n        latestSha = response.data.sha;\n\n        const etag = response.headers.etag;\n\n        // Only save towards the cache if there is no userToken. For people with a userToken\n        // we have 12k requests per hour to use. Won't hit that ever.\n        if (etag && !userToken) {\n          etagCache.set(cacheId, {\n            etag,\n            sha: response.data.sha,\n          });\n        }\n      }\n\n      shaCache.set(cacheId, latestSha);\n    }\n\n    return {\n      commitSha: latestSha,\n      username,\n      repo,\n      branch,\n      path,\n    };\n  } catch (e) {\n    // There is a chance that the branch contains slashes, we try to fix this\n    // by requesting again with the first part of the path appended to the branch\n    // when a request fails (404)\n    if (\n      e.response &&\n      (e.response.status === 404 || e.response.status === 422)\n    ) {\n      const [branchAddition, ...newPath] = path.split(\"/\");\n      const newBranch = `${branch}/${branchAddition}`;\n\n      if (branchAddition !== \"\") {\n        return await fetchRepoInfo(\n          username,\n          repo,\n          newBranch,\n          newPath.join(\"/\"),\n          false,\n          userToken\n        );\n      }\n\n      e.message = NOT_FOUND_MESSAGE;\n    }\n\n    if (e.response && 
e.response.status === 403 && userToken == null) {\n      const meter = appsignal.metrics();\n      meter.incrementCounter(\"github_rate_limit\", 1);\n    }\n\n    Sentry.captureException(e);\n\n    throw e;\n  } finally {\n    if (span) {\n      span.close();\n    }\n  }\n}\n\nexport async function fetchPullInfo(\n  username: string,\n  repo: string,\n  pull: number,\n  userToken?: string\n): Promise<IPrResponse> {\n  const url = buildPullApiUrl(username, repo, pull);\n\n  try {\n    const response = await requestAxios(\"Get Pull Info\", {\n      url: encodeURI(url),\n      ...createAxiosRequestConfig(userToken),\n    });\n\n    const data = response.data;\n\n    return {\n      number: data.head.number,\n      repo: data.head.repo.name,\n      username: data.head.repo.owner.login,\n      commitSha: data.head.sha,\n      branch: data.head.ref,\n      state: data.state,\n      merged: data.merged,\n      mergeable: data.mergeable,\n      mergeable_state: data.mergeable_state,\n      rebaseable: data.rebaseable,\n      additions: data.additions,\n      changed_files: data.changed_files,\n      commits: data.commits,\n      baseCommitSha: data.base.sha,\n      deletions: data.deletions,\n    };\n  } catch (e) {\n    e.message = \"Could not find pull request information\";\n    throw e;\n  }\n}\n\nconst MAX_ZIP_SIZE = 128 * 1024 * 1024; // 128Mb\n\nexport async function downloadZip(\n  gitInfo: IGitInfo,\n  commitSha: string,\n  userToken?: string\n) {\n  const repoUrl = buildRepoApiUrl(gitInfo.username, gitInfo.repo);\n  const url = encodeURI(`${repoUrl}/zipball/${commitSha}`);\n  const Accept = \"application/vnd.github.v3+json\";\n  const buffer: Buffer = await fetch(url, {\n    headers: {\n      Authorization: userToken\n        ? 
`Bearer ${userToken}`\n        : `Basic ${encode(`${GITHUB_CLIENT_ID}:${GITHUB_CLIENT_SECRET}`)}`,\n      Accept,\n    },\n  }).then((res) => {\n    if (Number(res.headers.get(\"Content-Length\")) > MAX_ZIP_SIZE) {\n      throw new Error(\"This repo is too big to import\");\n    }\n\n    if (!res.ok) {\n      return res.text().then((text) => {\n        const error = new Error(\n          `Could not import repo from GitHub, error from GitHub. Status code: ${res.status}, error: ${text}`\n        );\n\n        // Forward the error status from GitHub, eg. if GH returns 404 we return that as well.\n        // This is handled in error-handler.ts middleware.\n        // @ts-ignore\n        error.status = res.status;\n\n        throw error;\n      });\n    } else {\n      return res.buffer();\n    }\n  });\n\n  const loadedZip = await zip.loadAsync(buffer);\n\n  return loadedZip;\n}\n\nexport async function checkRemainingRateLimit(\n  numberOfRequests: number\n): Promise<boolean> {\n  const url = \"https://api.github.com/rate_limit\";\n  const response: {\n    data: { resources: { core: { remaining: number } } };\n  } = await requestAxios(\"Checking Remaining Rate Limit\", {\n    url: encodeURI(url),\n    params: {\n      numberOfRequests: numberOfRequests,\n    },\n  });\n\n  let remaining = 0;\n\n  if (response.data) {\n    remaining = response.data.resources.core.remaining;\n  }\n\n  return numberOfRequests < remaining;\n}\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/index.ts",
    "content": "import * as Sentry from \"@sentry/node\";\nimport { IModule, INormalizedModules } from \"codesandbox-import-util-types\";\nimport createSandbox from \"codesandbox-import-utils/lib/create-sandbox\";\nimport { Context } from \"koa\";\n\nimport * as api from \"./api\";\nimport { getComparison } from \"./api\";\nimport { downloadRepository } from \"./pull/download\";\nimport * as push from \"./push\";\nimport { IChanges, IGitInfo } from \"./push\";\n\nconst getUserToken = (ctx: Context) => {\n  const header = ctx.header.authorization;\n  if (header) {\n    if (header.startsWith(\"Bearer \")) {\n      return header.replace(\"Bearer \", \"\");\n    }\n    return header;\n  }\n\n  return undefined;\n};\n\nexport const info = async (ctx: Context, next: () => Promise<any>) => {\n  const userToken = getUserToken(ctx);\n  let branch = ctx.params.branch;\n\n  if (!branch) {\n    branch = await api.getDefaultBranch(\n      ctx.params.username,\n      ctx.params.repo,\n      userToken\n    );\n  }\n\n  const response = await api.fetchRepoInfo(\n    ctx.params.username,\n    ctx.params.repo,\n    branch,\n    ctx.params.path,\n    false,\n    userToken\n  );\n\n  ctx.body = response;\n};\n\n// We receive paths as \"/src/index.js\" and root path as \"src\", and Git takes\n// \"src/index.js\", so we need to ensure we produce the correct paths\nconst changesWithRootPath = (changes: IChanges, rootPath = \"\"): IChanges => {\n  const convertPath = (path: string) => {\n    if (rootPath) {\n      return rootPath + path;\n    }\n\n    return path.substr(1);\n  };\n  return {\n    added: changes.added.map((change) => ({\n      ...change,\n      path: convertPath(change.path),\n    })),\n    deleted: changes.deleted.map(convertPath),\n    modified: changes.modified.map((change) => ({\n      ...change,\n      path: convertPath(change.path),\n    })),\n  };\n};\n\nexport const pullInfo = async (ctx: Context, next: () => Promise<any>) => {\n  const userToken = 
getUserToken(ctx);\n\n  ctx.body = await api.fetchPullInfo(\n    ctx.params.username,\n    ctx.params.repo,\n    ctx.params.pull,\n    userToken\n  );\n};\n\nexport const getRights = async (ctx: Context) => {\n  const userToken = getUserToken(ctx);\n\n  const rights = await api.fetchRights(\n    ctx.params.username,\n    ctx.params.repo,\n    userToken\n  );\n\n  ctx.body = {\n    permission: rights,\n  };\n};\n\n/**\n * This route will take a github path and return sandbox data for it\n *\n * Data contains all files, directories and package.json info\n */\nexport const data = async (ctx: Context, next: () => Promise<any>) => {\n  try {\n    // We get branch, etc from here because there could be slashes in a branch name,\n    // we can retrieve if this is the case from this method\n    let { username, repo, branch, commitSha } = ctx.params;\n    const userToken = getUserToken(ctx);\n\n    Sentry.setContext(\"repo\", {\n      username,\n      repo,\n      branch,\n      commitSha,\n    });\n\n    const path = ctx.params.path && ctx.params.path.replace(\"+\", \" \");\n\n    let title = `${username}/${repo}`;\n    if (path) {\n      const splittedPath = path.split(\"/\");\n      title = title + `: ${splittedPath[splittedPath.length - 1]}`;\n    }\n\n    let isPrivate = false;\n\n    if (userToken) {\n      isPrivate = await api.isRepoPrivate(username, repo, userToken);\n    }\n\n    if (!branch) {\n      branch = await api.getDefaultBranch(username, repo, userToken);\n    }\n\n    const downloadedFiles = await downloadRepository(\n      {\n        username,\n        repo,\n        branch,\n        path,\n      },\n      commitSha,\n      isPrivate,\n      userToken\n    );\n\n    if (isPrivate) {\n      api.resetShaCache({ branch, username, repo, path });\n    }\n\n    console.log(\n      `Creating sandbox for ${username}/${repo}, branch: ${branch}, path: ${path}`\n    );\n\n    const sandboxParams = await createSandbox(downloadedFiles);\n\n    const finalTitle = 
sandboxParams.title || title;\n\n    ctx.body = {\n      ...sandboxParams,\n      // If no title is set in package.json, go for this one\n      title: finalTitle,\n\n      // Privacy 2 is private, privacy 0 is public\n      privacy: isPrivate ? 2 : 0,\n    };\n  } catch (e) {\n    // Here we catch our false, preemptive rate limit and give it a proper error status code for the server.\n    if (\n      e.message == \"Can't make axios requests, not enough rate limit remaining\"\n    ) {\n      ctx.body = {\n        error: \"Can't make axios requests, not enough rate limit remaining\",\n      };\n      ctx.status = 403;\n    } else {\n      throw e;\n    }\n  }\n};\n\n/*\n  Compares two refs on the repo\n*/\nexport const compare = async (ctx: Context) => {\n  const { baseRef, headRef, token, includeContents } = ctx.request.body;\n  const { username, repo } = ctx.params;\n\n  const comparison = await getComparison(\n    username,\n    repo,\n    baseRef,\n    headRef,\n    token\n  );\n\n  if (includeContents) {\n    const files = await Promise.all(\n      comparison.files.map(\n        ({\n          additions,\n          changes,\n          contents_url,\n          deletions,\n          filename,\n          status,\n          patch,\n          sha,\n        }) => {\n          return api.getContent(contents_url, token).then((content) => {\n            const data = content.content;\n            const buffer = Buffer.from(data, content.encoding);\n\n            let stringContent: string;\n\n            // If patch it is a text file, if not it is a binary\n            if (patch) {\n              stringContent = buffer.toString(\"utf-8\");\n            } else {\n              // When we include binary files, we include them as base64. This will allow a \"merge commit\", related to\n              // a PR being out of sync with its source branch (ex. 
\"master\"), to add binary files\n              stringContent = buffer.toString(\"base64\");\n            }\n\n            return {\n              additions,\n              changes,\n              deletions,\n              filename,\n              status,\n              content: stringContent,\n              isBinary: !patch,\n            };\n          });\n        }\n      )\n    );\n\n    ctx.body = {\n      files,\n      baseCommitSha: comparison.base_commit.sha,\n      headCommitSha: comparison.commits.length\n        ? comparison.commits[comparison.commits.length - 1].sha\n        : comparison.merge_base_commit.sha,\n    };\n  } else {\n    ctx.body = {\n      files: comparison.files.map(\n        ({ additions, status, filename, deletions, changes }) => ({\n          additions,\n          status,\n          filename,\n          deletions,\n          changes,\n        })\n      ),\n      baseCommitSha: comparison.base_commit.sha,\n      headCommitSha: comparison.commits.length\n        ? 
comparison.commits[0].sha\n        : comparison.merge_base_commit.sha,\n    };\n  }\n};\n\nexport const pr = async (ctx: Context) => {\n  const {\n    changes,\n    title,\n    description,\n    commitSha,\n    currentUser,\n    token,\n    sandboxId,\n  }: {\n    changes: IChanges;\n    title: string;\n    description: string;\n    commitSha: string;\n    currentUser: string;\n    token: string;\n    sandboxId: string;\n  } = ctx.request.body;\n  const { username, repo, branch, path } = ctx.params;\n\n  let gitInfo: IGitInfo = {\n    username,\n    repo,\n    branch,\n    path,\n  };\n\n  const rights = await api.fetchRights(username, repo, token);\n\n  if (rights === \"none\" || rights === \"read\") {\n    // Ah, we need to fork...\n    gitInfo = await push.createFork(gitInfo, currentUser, token);\n  }\n\n  const commit = await push.createInitialCommit(\n    gitInfo,\n    changesWithRootPath(changes, path),\n    [commitSha],\n    token\n  );\n\n  const res = await push.createBranch(\n    gitInfo,\n    commit.sha,\n    token,\n    `csb-${sandboxId}`\n  );\n  const base = {\n    branch,\n    repo,\n    username,\n  };\n  const head = {\n    branch: res.branchName,\n    repo: gitInfo.repo,\n    username: gitInfo.username,\n  };\n\n  ctx.body = await api.createPr(base, head, title, description, token);\n};\n\nexport const commit = async (ctx: Context) => {\n  const { parentCommitShas, changes, message, token } = ctx.request.body;\n  const { username, repo, branch, path } = ctx.params;\n\n  const gitInfo: IGitInfo = {\n    username,\n    repo,\n    branch,\n    path,\n  };\n\n  const commit = await push.createCommit(\n    gitInfo,\n    changesWithRootPath(changes, path),\n    parentCommitShas,\n    message,\n    token\n  );\n\n  await api.updateReference(username, repo, branch, commit.sha, token);\n\n  ctx.body = commit;\n};\n\nexport const repo = async (ctx: Context, next: () => Promise<any>) => {\n  const {\n    token,\n    normalizedFiles: fileArray,\n    
privateRepo,\n  }: {\n    token: string;\n    normalizedFiles: Array<IModule & { path: string }>;\n    privateRepo?: boolean;\n  } = ctx.request.body;\n  const { username, repo } = ctx.params;\n\n  const normalizedFiles: INormalizedModules = fileArray.reduce(\n    (total, file) => ({\n      ...total,\n      [file.path]: file,\n    }),\n    {}\n  );\n\n  if (!repo) {\n    throw new Error(\"Repo name cannot be empty\");\n  }\n\n  const result = await push.createRepo(\n    username,\n    repo,\n    normalizedFiles,\n    token,\n    privateRepo\n  );\n\n  ctx.body = result;\n};\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/pull/download.ts",
    "content": "import * as JSZip from \"jszip\";\n\nimport { isText } from \"codesandbox-import-utils/lib/is-text\";\nimport { INormalizedModules } from \"codesandbox-import-util-types\";\n\nimport { IGitInfo } from \"../push/index\";\nimport {\n  downloadZip,\n  getLatestCommitShaOfFile,\n  checkRemainingRateLimit,\n} from \"../api\";\n\nconst getFolderName = (zip: JSZip) =>\n  `${Object.keys(zip.files)[0].split(\"/\")[0]}/`;\n\n/**\n * We use https://rawgit.com/ as urls, since they change the content-type corresponding\n * to the file. Github always uses text/plain\n */\nexport const rawGitUrl = (\n  gitInfo: IGitInfo,\n  filePath: string,\n  commitSha?: string\n) => {\n  let url = `https://rawcdn.githack.com/${gitInfo.username}/${gitInfo.repo}/${\n    commitSha || gitInfo.branch\n  }/`;\n  if (gitInfo.path) {\n    url += gitInfo.path + \"/\";\n  }\n  url += filePath;\n\n  return url;\n};\n\nexport async function downloadRepository(\n  gitInfo: IGitInfo,\n  commitSha: string,\n  isPrivate: boolean,\n  userToken?: string\n): Promise<INormalizedModules> {\n  const zip = await downloadZip(gitInfo, commitSha, userToken);\n  let folderName = getFolderName(zip);\n\n  if (gitInfo.path) {\n    folderName += gitInfo.path + \"/\";\n  }\n\n  const result: INormalizedModules = {};\n\n  const pathArray: string[] = [];\n\n  // First process non-binary files, and save paths of binary files to request\n  await Promise.all(\n    Object.keys(zip.files).map(async (path) => {\n      if (path.startsWith(folderName)) {\n        const relativePath = path.replace(folderName, \"\");\n\n        const file = zip.files[path];\n\n        if (!file.dir) {\n          const bufferContents = await file.async(\"nodebuffer\");\n          const text = await isText(file.name, bufferContents);\n\n          if (!text) {\n            if (isPrivate) {\n              result[relativePath] = {\n                binaryContent: bufferContents.toString(\"base64\"),\n                content: \"\",\n            
    isBinary: true,\n              };\n            } else {\n              pathArray.push(relativePath);\n            }\n          } else {\n            const contents = await file.async(\"text\");\n            result[relativePath] = {\n              content: contents || \"\",\n              isBinary: false,\n            };\n          }\n        }\n      }\n    })\n  );\n\n  const requestsToMake = pathArray.length;\n\n  /**\n   * Check if there is enough of our CodeSandbox Github token rate limit left to be able to\n   * request all the files we need to. If there isn't, then we shouldn't make the Promise.all\n   * request because when the first 403 rate limit comes through, it rejects everything, and\n   * it wastes even more rate limit tries.\n   */\n  if (!userToken) {\n    const canRequest = await checkRemainingRateLimit(requestsToMake);\n    if (!canRequest) {\n      throw new Error(\n        \"Can't make axios requests, not enough rate limit remaining\"\n      );\n    }\n  }\n\n  // Then we can request the SHAs of binary files if there is enough rate limit left.\n  await Promise.all(\n    pathArray.map(async (relativePath) => {\n      const fileSha = await getLatestCommitShaOfFile(\n        gitInfo.username,\n        gitInfo.repo,\n        gitInfo.branch,\n        relativePath,\n        userToken\n      );\n\n      result[relativePath] = {\n        content: rawGitUrl(gitInfo, relativePath, fileSha),\n        isBinary: true,\n      };\n    })\n  );\n\n  return result;\n}\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/push/index.ts",
    "content": "import {\n  IBinaryModule,\n  IModule,\n  INormalizedModules,\n} from \"codesandbox-import-util-types\";\n\nimport delay from \"../../../utils/delay\";\nimport * as api from \"../api\";\nimport { createBlobs } from \"./utils/create-blobs\";\n\nexport interface IGitInfo {\n  username: string;\n  repo: string;\n  branch: string;\n  path?: string;\n}\n\nexport interface ITreeFile {\n  path: string;\n  mode: string;\n  type: string;\n  size: number;\n  sha: string;\n  url: string;\n}\n\nexport interface IChanges {\n  added: Array<{\n    path: string;\n    content: string;\n    encoding: \"base64\" | \"utf-8\";\n  }>;\n  deleted: string[];\n  modified: Array<{\n    path: string;\n    content: string;\n    encoding: \"base64\" | \"utf-8\";\n  }>;\n}\n\nexport type ITree = ITreeFile[];\n\nfunction generateBranchName() {\n  const id = Date.now();\n  return `csb-${id}`;\n}\n\nexport async function createBranch(\n  gitInfo: IGitInfo,\n  refSha: string,\n  userToken: string,\n  branchName: string = generateBranchName()\n) {\n  const res = await api.createReference(\n    gitInfo.username,\n    gitInfo.repo,\n    branchName,\n    refSha,\n    userToken\n  );\n\n  return { url: res.url, ref: res.ref, branchName };\n}\n\nexport async function createFork(\n  gitInfo: IGitInfo,\n  currentUser: string,\n  userToken: string\n): Promise<IGitInfo> {\n  const forkGitInfo: IGitInfo = { ...gitInfo, username: currentUser };\n\n  const existingRepo = await api.doesRepoExist(\n    forkGitInfo.username,\n    forkGitInfo.repo\n  );\n\n  if (!existingRepo) {\n    await api.createFork(gitInfo.username, gitInfo.repo, userToken);\n\n    // Forking is asynchronous, so we need to poll for when the repo has been created\n    let repoExists = false;\n    let tryCount = 0;\n    while (!repoExists) {\n      tryCount++;\n\n      if (tryCount > 300) {\n        throw new Error(\n          \"Forking repo takes longer than 5 minutes, try again later.\"\n        );\n      }\n\n      repoExists 
= await api.doesRepoExist(\n        forkGitInfo.username,\n        forkGitInfo.repo\n      );\n\n      await delay(1000);\n    }\n  }\n\n  return forkGitInfo;\n}\n\nexport async function createInitialCommit(\n  gitInfo: IGitInfo,\n  changes: IChanges,\n  parentShas: string[],\n  userToken: string\n) {\n  return createCommit(\n    gitInfo,\n    changes,\n    parentShas,\n    \"initial commit\",\n    userToken\n  );\n}\n\nexport async function createCommit(\n  gitInfo: IGitInfo,\n  changes: IChanges,\n  parentShas: string[],\n  message: string,\n  userToken: string\n) {\n  const { username, repo } = gitInfo;\n  let treeSha = await api.getCommitTreeSha(\n    username,\n    repo,\n    parentShas[0],\n    userToken\n  );\n  let tree: ITree = [];\n\n  if (\n    changes.added.length ||\n    changes.deleted.length ||\n    changes.modified.length\n  ) {\n    if (changes.deleted.length) {\n      tree = await api.getTreeWithDeletedFiles(\n        username,\n        repo,\n        treeSha,\n        changes.deleted,\n        userToken\n      );\n    }\n    const createdBlobs = await createBlobs(\n      [...changes.modified, ...changes.added],\n      gitInfo,\n      userToken\n    );\n    const updatedTree = tree.concat(createdBlobs);\n    const treeResponse = await api.createTree(\n      username,\n      repo,\n      updatedTree,\n      changes.deleted.length ? 
null : treeSha,\n      userToken\n    );\n    treeSha = treeResponse.sha;\n  }\n\n  return await api.createCommit(\n    gitInfo.username,\n    gitInfo.repo,\n    treeSha,\n    parentShas,\n    message,\n    userToken\n  );\n}\n\nexport async function createRepo(\n  username: string,\n  name: string,\n  sandboxFiles: INormalizedModules,\n  userToken: string,\n  privateRepo?: boolean\n) {\n  await api.createRepo(username, name, userToken, privateRepo);\n\n  const latestData = await api.fetchRepoInfo(\n    username,\n    name,\n    \"main\",\n    \"\",\n    true,\n    userToken\n  );\n\n  const gitInfo: IGitInfo = {\n    username: latestData.username,\n    repo: latestData.repo,\n    branch: latestData.branch,\n    path: latestData.path,\n  };\n\n  const changes: IChanges = {\n    added: Object.keys(sandboxFiles)\n      .filter((path) => sandboxFiles[path].type !== \"directory\")\n      .map((path) => {\n        if (\"binaryContent\" in sandboxFiles[path]) {\n          const file = sandboxFiles[path] as IBinaryModule;\n          return {\n            content: file.binaryContent,\n            encoding: \"base64\",\n            path,\n          };\n        }\n\n        const file = sandboxFiles[path] as IModule;\n\n        return {\n          content: file.content,\n          encoding: file.isBinary ? \"base64\" : \"utf-8\",\n          path,\n        };\n      }),\n    deleted: [],\n    modified: [],\n  };\n\n  const commit = await createCommit(\n    gitInfo,\n    changes,\n    [latestData.commitSha],\n    \"Initial commit\",\n    userToken\n  );\n\n  const res = await api.updateReference(\n    username,\n    gitInfo.repo,\n    gitInfo.branch,\n    commit.sha,\n    userToken\n  );\n\n  api.resetShaCache(gitInfo);\n\n  return gitInfo;\n}\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/push/utils/__tests__/delta.test.ts",
    "content": "import { INormalizedModules } from \"codesandbox-import-util-types\";\nimport getDelta from \"../delta\";\n\ndescribe(\"commit\", () => {\n  describe(\"delta\", () => {\n    const SAMPLE_TREE = [\n      {\n        path: \"src/App.css\",\n        mode: \"100644\",\n        type: \"blob\",\n        sha: \"15adfdc710ca89d2c427dcbb6716943e1029c73a\",\n        size: 341,\n        url:\n          \"https://api.github.com/repos/CompuIves/codesandbox-test-git-app/git/blobs/15adfdc710ca89d2c427dcbb6716943e1029c73a\",\n      },\n      {\n        path: \"src/App.js\",\n        mode: \"100644\",\n        type: \"blob\",\n        sha: \"d7d52a7f38a321668d4fa83409a7c47d1bfccd7c\",\n        size: 496,\n        url:\n          \"https://api.github.com/repos/CompuIves/codesandbox-test-git-app/git/blobs/d7d52a7f38a321668d4fa83409a7c47d1bfccd7c\",\n      },\n    ];\n\n    const SAMPLE_MODULES: INormalizedModules = {\n      \"src/App.css\": {\n        content: `.App {\n  text-align: center;\n}\n\n.App-logo {\n  animation: App-logo-spin infinite 20s linear;\n  height: 80px;\n}\n\n.App-header {\n  background-color: #222;\n  height: 150px;\n  padding: 20px;\n  color: white;\n}\n\n.App-intro {\n  font-size: large;\n}\n\n@keyframes App-logo-spin {\n  from { transform: rotate(0deg); }\n  to { transform: rotate(360deg); }\n}\n`,\n        isBinary: false,\n      },\n      \"src/App.js\": {\n        content: `import React, { Component } from 'react';\nimport logo from './logo.svg';\nimport './App.css';\n\nclass App extends Component {\n  render() {\n    return (\n      <div className=\"App\">\n        <div className=\"App-header\">\n          <img src={logo} className=\"App-logo\" alt=\"logo\" />\n          <h2>Welcome to React</h2>\n        </div>\n        <p className=\"App-intro\">\n          To get started, edit <code>src/App.js</code> and save to reload.\n        </p>\n      </div>\n    );\n  }\n}\n\nexport default App;\n`,\n        isBinary: false,\n      },\n    };\n\n   
 it(\"detects no change\", () => {\n      expect(getDelta(SAMPLE_TREE, SAMPLE_MODULES)).toEqual({\n        added: [],\n        deleted: [],\n        modified: [],\n      });\n    });\n\n    it(\"detects added files\", () => {\n      const newModules = {\n        ...SAMPLE_MODULES,\n        \"test.js\": { content: \"Hey\", isBinary: false },\n      };\n\n      expect(getDelta(SAMPLE_TREE, newModules)).toEqual({\n        added: [\"test.js\"],\n        deleted: [],\n        modified: [],\n      });\n    });\n\n    it(\"detects modified files\", () => {\n      const newModules = {\n        ...SAMPLE_MODULES,\n        \"src/App.js\": { content: \"Hey\", isBinary: false },\n      };\n\n      expect(getDelta(SAMPLE_TREE, newModules)).toEqual({\n        added: [],\n        deleted: [],\n        modified: [\"src/App.js\"],\n      });\n    });\n\n    it(\"detects deleted files\", () => {\n      const newModules = {\n        ...SAMPLE_MODULES,\n        \"src/App.js\": null,\n      };\n\n      expect(getDelta(SAMPLE_TREE, newModules)).toEqual({\n        added: [],\n        deleted: [\"src/App.js\"],\n        modified: [],\n      });\n    });\n  });\n});\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/push/utils/create-blobs.ts",
    "content": "import { IModule, INormalizedModules } from \"codesandbox-import-util-types\";\nimport fetch from \"node-fetch\";\n\nimport { createBlob } from \"../../api\";\nimport { IGitInfo, ITree } from \"../index\";\n\nasync function downloadContent(module: IModule): Promise<string> {\n  if (!module.isBinary) {\n    return module.content;\n  }\n\n  return fetch(module.content)\n    .then((x) => x.buffer())\n    .then((buffer) => buffer.toString(\"base64\"));\n}\n\nexport async function createBlobs(\n  files: Array<{ path: string; content: string; encoding: \"base64\" | \"utf-8\" }>,\n  gitInfo: IGitInfo,\n  token: string\n): Promise<ITree> {\n  return Promise.all(\n    files.map(async ({ path, content, encoding }) => {\n      const result = await createBlob(\n        gitInfo.username,\n        gitInfo.repo,\n        content,\n        encoding,\n        token\n      );\n\n      return {\n        path,\n        sha: result.sha,\n        size: content.length,\n        mode: \"100644\", // blob\n        type: \"blob\",\n        url: result.url,\n      };\n    })\n  );\n}\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/push/utils/delta.ts",
    "content": "import { IModule, IDirectory } from \"codesandbox-import-util-types\";\nimport { ITree } from \"../index\";\n\nimport { createHash } from \"crypto\";\n\nfunction getGitSha(content: string) {\n  const hash = createHash(\"sha1\");\n\n  hash.update(\"blob \" + new Buffer(content).length + \"\\0\" + content);\n\n  return hash.digest(\"hex\");\n}\n\ninterface INormalizedAndDeletedModules {\n  [path: string]: IModule | IDirectory | null;\n}\n\nexport default function getDelta(\n  tree: ITree,\n  modules: INormalizedAndDeletedModules\n) {\n  const added: string[] = [];\n  const modified: string[] = [];\n  const deleted: string[] = [];\n\n  tree.forEach((file) => {\n    const equivalentModule = modules[file.path];\n\n    if (!equivalentModule) {\n      deleted.push(file.path);\n    } else {\n      if (equivalentModule.type === \"directory\" || equivalentModule.isBinary) {\n        return;\n      }\n\n      if (getGitSha(equivalentModule.content) !== file.sha) {\n        modified.push(file.path);\n      }\n    }\n  });\n\n  Object.keys(modules).forEach((path) => {\n    if (!tree.find((t) => t.path === path)) {\n      added.push(path);\n    }\n  });\n\n  return { added, modified, deleted };\n}\n"
  },
  {
    "path": "packages/git-extractor/src/routes/github/types.d.ts",
    "content": "export type Module = {\n  name: string;\n  path: string;\n  sha: string;\n  size: number;\n  url: string;\n  html_url: string;\n  git_url: string;\n  download_url: string;\n  type: \"file\" | \"dir\";\n};\n\nexport type NormalizedDirectory = {\n  path: string;\n  name: string;\n  files: Array<Module>;\n  directories: Array<NormalizedDirectory>;\n};\n\nexport type DownloadedFile = Module & {\n  code: string;\n  isBinary: boolean;\n};\n"
  },
  {
    "path": "packages/git-extractor/src/utils/appsignal.ts",
    "content": "import { Appsignal } from \"@appsignal/nodejs\";\n\nexport const appsignal = new Appsignal({\n  active: process.env.NODE_ENV === \"production\",\n  name: \"Importers\",\n  environment: String(process.env.ENVIRONMENT),\n});\n"
  },
  {
    "path": "packages/git-extractor/src/utils/delay.ts",
    "content": "export default function delay(ms: number): Promise<void> {\n  return new Promise((resolve) => {\n    setTimeout(() => resolve(), ms);\n  });\n}\n"
  },
  {
    "path": "packages/git-extractor/src/utils/env.ts",
    "content": "export default process.env.NODE_ENV === \"production\"\n  ? \"production\"\n  : \"development\";\n"
  },
  {
    "path": "packages/git-extractor/src/utils/log.ts",
    "content": "import * as _debug from \"debug\";\n\nif (process.env.NODE_ENV === \"development\") {\n  _debug.enable(\"cs:*\");\n}\nconst debug = _debug(\"cs:git-extractor\");\n\nexport default function log(message: string) {\n  debug(message);\n}\n"
  },
  {
    "path": "packages/git-extractor/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    /* Basic Options */\n    \"target\": \"es6\" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,\n    \"module\": \"commonjs\" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,\n    \"lib\": [\n      \"es2015\",\n      \"dom\"\n    ] /* Specify library files to be included in the compilation:  */,\n    // \"allowJs\": true,                       /* Allow javascript files to be compiled. */\n    // \"checkJs\": true,                       /* Report errors in .js files. */\n    // \"jsx\": \"preserve\",                     /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */\n    // \"declaration\": true,                   /* Generates corresponding '.d.ts' file. */\n    \"sourceMap\": true /* Generates corresponding '.map' file. */,\n    // \"outFile\": \"./\",                       /* Concatenate and emit output to single file. */\n    \"outDir\": \"./dist\" /* Redirect output structure to the directory. */,\n    // \"rootDir\": \"./\",                       /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */\n    // \"removeComments\": true,                /* Do not emit comments to output. */\n    // \"noEmit\": true,                        /* Do not emit outputs. */\n    // \"importHelpers\": true,                 /* Import emit helpers from 'tslib'. */\n    // \"downlevelIteration\": true,            /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */\n    // \"isolatedModules\": true,               /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */\n    /* Strict Type-Checking Options */\n    \"strict\": true /* Enable all strict type-checking options. 
*/,\n    // \"noImplicitAny\": true,                 /* Raise error on expressions and declarations with an implied 'any' type. */\n    // \"strictNullChecks\": true,              /* Enable strict null checks. */\n    // \"noImplicitThis\": true,                /* Raise error on 'this' expressions with an implied 'any' type. */\n    // \"alwaysStrict\": true,                  /* Parse in strict mode and emit \"use strict\" for each source file. */\n    /* Additional Checks */\n    // \"noUnusedLocals\": true,                /* Report errors on unused locals. */\n    // \"noUnusedParameters\": true,            /* Report errors on unused parameters. */\n    // \"noImplicitReturns\": true,             /* Report error when not all code paths in function return a value. */\n    // \"noFallthroughCasesInSwitch\": true,    /* Report errors for fallthrough cases in switch statement. */\n    /* Module Resolution Options */\n    // \"moduleResolution\": \"node\",            /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */\n    // \"baseUrl\": \"./\",                       /* Base directory to resolve non-absolute module names. */\n    // \"paths\": {},                           /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */\n    // \"rootDirs\": [],                        /* List of root folders whose combined content represents the structure of the project at runtime. */\n    // \"typeRoots\": []                        /* List of folders to include type definitions from. */\n    // \"types\": []                            /* Type declaration files to be included in compilation. */\n    // \"allowSyntheticDefaultImports\": true,  /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. 
*/\n    /* Source Map Options */\n    // \"sourceRoot\": \"./\",                    /* Specify the location where debugger should locate TypeScript files instead of source locations. */\n    // \"mapRoot\": \"./\",                       /* Specify the location where debugger should locate map files instead of generated locations. */\n    // \"inlineSourceMap\": true,               /* Emit a single file with source maps instead of having a separate file. */\n    // \"inlineSources\": true,                 /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */\n    /* Experimental Options */\n    // \"experimentalDecorators\": true,        /* Enables experimental support for ES7 decorators. */\n    // \"emitDecoratorMetadata\": true,         /* Enables experimental support for emitting type metadata for decorators. */\n    \"skipLibCheck\": true,\n    \"useUnknownInCatchVariables\": false\n  },\n  \"include\": [\"src/**/*.ts\", \"typings/**/*.ts\"],\n  \"exclude\": [\"node_modules\", \"__tests__\", \"build\", \"**/*.test.ts\", \"temp\"]\n}\n"
  },
  {
    "path": "packages/hmaeo.yml",
    "content": "heahea\n"
  },
  {
    "path": "packages/import-utils/.gitignore",
    "content": "*.js\n*.js.map\nlib\n"
  },
  {
    "path": "packages/import-utils/LICENSE",
    "content": "                   GNU LESSER GENERAL PUBLIC LICENSE\n                       Version 3, 29 June 2007\n\nCopyright (C) 2018 CodeSandbox BV. <https://codesandbox.io/>\nEveryone is permitted to copy and distribute verbatim copies\nof this license document, but changing it is not allowed.\n\nThis version of the GNU Lesser General Public License incorporates\nthe terms and conditions of version 3 of the GNU General Public\nLicense, supplemented by the additional permissions listed below.\n\n0.  Additional Definitions.\n\nAs used herein, \"this License\" refers to version 3 of the GNU Lesser\nGeneral Public License, and the \"GNU GPL\" refers to version 3 of the GNU\nGeneral Public License.\n\n\"The Library\" refers to a covered work governed by this License,\nother than an Application or a Combined Work as defined below.\n\nAn \"Application\" is any work that makes use of an interface provided\nby the Library, but which is not otherwise based on the Library.\nDefining a subclass of a class defined by the Library is deemed a mode\nof using an interface provided by the Library.\n\nA \"Combined Work\" is a work produced by combining or linking an\nApplication with the Library. The particular version of the Library\nwith which the Combined Work was made is also called the \"Linked\nVersion\".\n\nThe \"Minimal Corresponding Source\" for a Combined Work means the\nCorresponding Source for the Combined Work, excluding any source code\nfor portions of the Combined Work that, considered in isolation, are\nbased on the Application, and not on the Linked Version.\n\nThe \"Corresponding Application Code\" for a Combined Work means the\nobject code and/or source code for the Application, including any data\nand utility programs needed for reproducing the Combined Work from the\nApplication, but excluding the System Libraries of the Combined Work.\n\n1.  
Exception to Section 3 of the GNU GPL.\n\nYou may convey a covered work under sections 3 and 4 of this License\nwithout being bound by section 3 of the GNU GPL.\n\n2.  Conveying Modified Versions.\n\nIf you modify a copy of the Library, and, in your modifications, a\nfacility refers to a function or data to be supplied by an Application\nthat uses the facility (other than as an argument passed when the\nfacility is invoked), then you may convey a copy of the modified\nversion:\n\na) under this License, provided that you make a good faith effort to\nensure that, in the event an Application does not supply the\nfunction or data, the facility still operates, and performs\nwhatever part of its purpose remains meaningful, or\n\nb) under the GNU GPL, with none of the additional permissions of\nthis License applicable to that copy.\n\n3.  Object Code Incorporating Material from Library Header Files.\n\nThe object code form of an Application may incorporate material from\na header file that is part of the Library. You may convey such object\ncode under terms of your choice, provided that, if the incorporated\nmaterial is not limited to numerical parameters, data structure\nlayouts and accessors, or small macros, inline functions and templates\n(ten or fewer lines in length), you do both of the following:\n\na) Give prominent notice with each copy of the object code that the\nLibrary is used in it and that the Library and its use are\ncovered by this License.\n\nb) Accompany the object code with a copy of the GNU GPL and this license\ndocument.\n\n4.  
Combined Works.\n\nYou may convey a Combined Work under terms of your choice that,\ntaken together, effectively do not restrict modification of the\nportions of the Library contained in the Combined Work and reverse\nengineering for debugging such modifications, if you also do each of\nthe following:\n\na) Give prominent notice with each copy of the Combined Work that\nthe Library is used in it and that the Library and its use are\ncovered by this License.\n\nb) Accompany the Combined Work with a copy of the GNU GPL and this license\ndocument.\n\nc) For a Combined Work that displays copyright notices during\nexecution, include the copyright notice for the Library among\nthese notices, as well as a reference directing the user to the\ncopies of the GNU GPL and this license document.\n\nd) Do one of the following:\n\n       0) Convey the Minimal Corresponding Source under the terms of this\n       License, and the Corresponding Application Code in a form\n       suitable for, and under terms that permit, the user to\n       recombine or relink the Application with a modified version of\n       the Linked Version to produce a modified Combined Work, in the\n       manner specified by section 6 of the GNU GPL for conveying\n       Corresponding Source.\n\n       1) Use a suitable shared library mechanism for linking with the\n       Library.  
A suitable mechanism is one that (a) uses at run time\n       a copy of the Library already present on the user's computer\n       system, and (b) will operate properly with a modified version\n       of the Library that is interface-compatible with the Linked\n       Version.\n\ne) Provide Installation Information, but only if you would otherwise\nbe required to provide such information under section 6 of the\nGNU GPL, and only to the extent that such information is\nnecessary to install and execute a modified version of the\nCombined Work produced by recombining or relinking the\nApplication with a modified version of the Linked Version. (If\nyou use option 4d0, the Installation Information must accompany\nthe Minimal Corresponding Source and Corresponding Application\nCode. If you use option 4d1, you must provide the Installation\nInformation in the manner specified by section 6 of the GNU GPL\nfor conveying Corresponding Source.)\n\n5.  Combined Libraries.\n\nYou may place library facilities that are a work based on the\nLibrary side by side in a single library together with other library\nfacilities that are not Applications and are not covered by this\nLicense, and convey such a combined library under terms of your\nchoice, if you do both of the following:\n\na) Accompany the combined library with a copy of the same work based\non the Library, uncombined with any other library facilities,\nconveyed under the terms of this License.\n\nb) Give prominent notice with the combined library that part of it\nis a work based on the Library, and explaining where to find the\naccompanying uncombined form of the same work.\n\n6.  Revised Versions of the GNU Lesser General Public License.\n\nThe Free Software Foundation may publish revised and/or new versions\nof the GNU Lesser General Public License from time to time. 
Such new\nversions will be similar in spirit to the present version, but may\ndiffer in detail to address new problems or concerns.\n\nEach version is given a distinguishing version number. If the\nLibrary as you received it specifies that a certain numbered version\nof the GNU Lesser General Public License \"or any later version\"\napplies to it, you have the option of following the terms and\nconditions either of that published version or of any later version\npublished by the Free Software Foundation. If the Library as you\nreceived it does not specify a version number of the GNU Lesser\nGeneral Public License, you may choose any version of the GNU Lesser\nGeneral Public License ever published by the Free Software Foundation.\n\nIf the Library as you received it specifies that a proxy can decide\nwhether future versions of the GNU Lesser General Public License shall\napply, that proxy's public statement of acceptance of any version is\npermanent authorization for you to choose that version for the\nLibrary.\n"
  },
  {
    "path": "packages/import-utils/package.json",
    "content": "{\n  \"name\": \"codesandbox-import-utils\",\n  \"version\": \"2.2.3\",\n  \"main\": \"lib/index.js\",\n  \"files\": [\n    \"lib/**\"\n  ],\n  \"scripts\": {\n    \"build\": \"rimraf lib && tsc -p tsconfig.json\",\n    \"test\": \"jest\",\n    \"test:watch\": \"jest --watch\",\n    \"prepublish\": \"yarn build\"\n  },\n  \"devDependencies\": {\n    \"rimraf\": \"^2.6.2\"\n  },\n  \"dependencies\": {\n    \"codesandbox-import-util-types\": \"^2.2.3\",\n    \"istextorbinary\": \"^6.0.0\",\n    \"lz-string\": \"^1.4.4\"\n  },\n  \"jest\": {\n    \"transform\": {\n      \".(ts|tsx)\": \"<rootDir>../../node_modules/ts-jest/preprocessor.js\"\n    },\n    \"testEnvironment\": \"node\",\n    \"moduleFileExtensions\": [\n      \"ts\",\n      \"tsx\",\n      \"js\",\n      \"json\"\n    ],\n    \"testPathIgnorePatterns\": [\n      \"<rootDir>/node_modules/\",\n      \"<rootDir>/dist/\",\n      \"<rootDir>/lib/\"\n    ],\n    \"testRegex\": \"(/__tests__/.*|\\\\.(test|spec))\\\\.(ts|tsx|js)$\"\n  },\n  \"gitHead\": \"3cdcdea389d39f2a92be73dcb73496f68c8ada41\"\n}\n"
  },
  {
    "path": "packages/import-utils/src/api/define.ts",
    "content": "import { ITemplate } from \"codesandbox-import-util-types\";\nimport * as LZString from \"lz-string\";\n\nexport interface IFiles {\n  [key: string]: {\n    content: string;\n    isBinary: boolean;\n  };\n}\n\nfunction compress(input: string) {\n  return LZString.compressToBase64(input)\n    .replace(/\\+/g, `-`) // Convert '+' to '-'\n    .replace(/\\//g, `_`) // Convert '/' to '_'\n    .replace(/=+$/, ``); // Remove ending '='\n}\n\nexport function getParameters(parameters: {\n  files: IFiles;\n  template?: ITemplate;\n}) {\n  return compress(JSON.stringify(parameters));\n}\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/__mocks__/pacote.ts",
    "content": "export const manifest = () => {\n  return { version: \"15.5.4\" };\n};\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/__tests__/__snapshots__/html-parser.test.ts.snap",
    "content": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n\nexports[`html-parser can retrieve body from html 1`] = `\n{\n  \"body\": \"\n          <div id=\"root\"></div>\n          <!--\n            This HTML file is a template.\n            If you open it directly in the browser, you will see an empty page.\n            You can add webfonts, meta tags, or analytics to this file.\n            The build step will place the bundled scripts into the <body> tag.\n            To begin the development, run \\`npm start\\` in this folder.\n            To create a production bundle, use \\`npm run build\\`.\n          -->\n        \",\n  \"externalResources\": [],\n}\n`;\n\nexports[`html-parser can retrieve css external resources 1`] = `\n{\n  \"body\": \"\n          <div id=\"root\"></div>\n          <!--\n            This HTML file is a template.\n            If you open it directly in the browser, you will see an empty page.\n            You can add webfonts, meta tags, or analytics to this file.\n            The build step will place the bundled scripts into the <body> tag.\n            To begin the development, run \\`npm start\\` in this folder.\n            To create a production bundle, use \\`npm run build\\`.\n          -->\n        \",\n  \"externalResources\": [\n    \"https://redux-form.com/6.8.0/bundle.css\",\n    \"//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css\",\n  ],\n}\n`;\n\nexports[`html-parser can retrieve js external resources 1`] = `\n{\n  \"body\": \"\n          <div id=\"root\"></div>\n          <!--\n            This HTML file is a template.\n            If you open it directly in the browser, you will see an empty page.\n            You can add webfonts, meta tags, or analytics to this file.\n            The build step will place the bundled scripts into the <body> tag.\n            To begin the development, run \\`npm start\\` in this folder.\n            To create a production bundle, use \\`npm run build\\`.\n 
         -->\n          <script type=\"text/javascript\" src=\"https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.slim.min.js\"></script>\n        \",\n  \"externalResources\": [\n    \"https://redux-form.com/6.8.0/bundle.css\",\n    \"//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css\",\n    \"https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.slim.min.js\",\n  ],\n}\n`;\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/__tests__/html-parser.test.ts",
    "content": "import parser from \"../html-parser\";\n\ndescribe(\"html-parser\", () => {\n  it(\"can retrieve body from html\", () => {\n    const BODY_HTML = `\n      <!doctype html>\n      <html lang=\"en\">\n        <head>\n          <meta charset=\"utf-8\">\n          <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n          <title>Redux Shopping Cart Example</title>\n        </head>\n        <body>\n          <div id=\"root\"></div>\n          <!--\n            This HTML file is a template.\n            If you open it directly in the browser, you will see an empty page.\n            You can add webfonts, meta tags, or analytics to this file.\n            The build step will place the bundled scripts into the <body> tag.\n            To begin the development, run \\`npm start\\` in this folder.\n            To create a production bundle, use \\`npm run build\\`.\n          -->\n        </body>\n      </html>\n    `;\n    expect(parser(BODY_HTML)).toMatchSnapshot();\n  });\n\n  it(\"can retrieve js external resources\", () => {\n    const BODY_HTML = `\n      <!doctype html>\n      <html lang=\"en\">\n        <head>\n          <meta charset=\"utf-8\">\n          <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n          <title>Redux Shopping Cart Example</title>\n          <link href=\"https://redux-form.com/6.8.0/bundle.css\"\n            media=\"screen, projection\"\n            rel=\"stylesheet\" type=\"text/css\"/>\n          <link href=\"//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css\"\n            media=\"screen, projection\" rel=\"stylesheet\" type=\"text/css\"/>\n        </head>\n        <body>\n          <div id=\"root\"></div>\n          <!--\n            This HTML file is a template.\n            If you open it directly in the browser, you will see an empty page.\n            You can add webfonts, meta tags, or analytics to this file.\n            The build step will 
place the bundled scripts into the <body> tag.\n            To begin the development, run \\`npm start\\` in this folder.\n            To create a production bundle, use \\`npm run build\\`.\n          -->\n          <script type=\"text/javascript\" src=\"https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.slim.min.js\"></script>\n        </body>\n      </html>\n    `;\n\n    expect(parser(BODY_HTML)).toMatchSnapshot();\n  });\n\n  it(\"can retrieve css external resources\", () => {\n    const BODY_HTML = `\n      <!doctype html>\n      <html lang=\"en\">\n        <head>\n          <meta charset=\"utf-8\">\n          <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n          <title>Redux Shopping Cart Example</title>\n          <link href=\"https://redux-form.com/6.8.0/bundle.css\"\n            media=\"screen, projection\"\n            rel=\"stylesheet\" type=\"text/css\"/>\n          <link href=\"//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css\"\n            media=\"screen, projection\" rel=\"stylesheet\" type=\"text/css\"/>\n        </head>\n        <body>\n          <div id=\"root\"></div>\n          <!--\n            This HTML file is a template.\n            If you open it directly in the browser, you will see an empty page.\n            You can add webfonts, meta tags, or analytics to this file.\n            The build step will place the bundled scripts into the <body> tag.\n            To begin the development, run \\`npm start\\` in this folder.\n            To create a production bundle, use \\`npm run build\\`.\n          -->\n        </body>\n      </html>\n    `;\n\n    expect(parser(BODY_HTML)).toMatchSnapshot();\n  });\n});\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/__tests__/templates.test.ts",
    "content": "import { getTemplate } from \"../templates\";\n\ndescribe(\"template detection\", () => {\n  it(\"detects a react template\", () => {\n    expect(\n      getTemplate(\n        {\n          dependencies: {},\n          devDependencies: {\n            \"react-scripts\": \"latest\",\n          },\n        },\n        {}\n      )\n    ).toEqual(\"create-react-app\");\n  });\n\n  it(\"detects a react template from forked create-react-app\", () => {\n    expect(\n      getTemplate(\n        {\n          dependencies: {},\n          devDependencies: {\n            \"@fork/react-scripts\": \"latest\",\n          },\n        },\n        {}\n      )\n    ).toEqual(\"create-react-app\");\n  });\n\n  it(\"detects a nuxt template\", () => {\n    expect(\n      getTemplate(\n        {\n          dependencies: {},\n          devDependencies: {\n            nuxt: \"latest\",\n          },\n        },\n        {}\n      )\n    ).toEqual(\"nuxt\");\n  });\n\n  it(\"detects a nuxt template when using nuxt3\", () => {\n    expect(\n      getTemplate(\n        {\n          dependencies: {},\n          devDependencies: {\n            nuxt3: \"latest\",\n          },\n        },\n        {}\n      )\n    ).toEqual(\"nuxt\");\n  });\n\n  it(\"detects an apollo template\", () => {\n    expect(\n      getTemplate(\n        {\n          dependencies: {},\n          devDependencies: {\n            \"apollo-server\": \"latest\",\n          },\n        },\n        {}\n      )\n    ).toEqual(\"apollo\");\n  });\n});\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/html-parser.ts",
    "content": "function isValidResource(resource: string) {\n  return (\n    resource.startsWith(\"https://\") ||\n    resource.startsWith(\"http://\") ||\n    resource.startsWith(\"//\")\n  );\n}\n\n/**\n * Checks line for css resource, returns if exist\n *\n * @param {string} line  line to check\n * @returns {(string | undefined)}\n */\nfunction getCssResource(line: string): string | undefined {\n  const cssRegex = /<link[^]*href=\"(.*\\.css)\"/;\n  const match = line.match(cssRegex);\n  if (match && match[1]) {\n    const resource = match[1];\n    if (!isValidResource(resource)) {\n      return;\n    }\n\n    return resource;\n  }\n}\n\n/**\n * Checks line for js resource, returns if resource exist\n *\n * @param {string} line  line to check\n * @returns {(string | undefined)}\n */\nfunction getJsResource(line: string): string | undefined {\n  const jsRegex = /<script[^]*src=\"(.*)\"/;\n  const match = line.match(jsRegex);\n  if (match && match[1]) {\n    const resource = match[1];\n    if (!isValidResource(resource)) {\n      return;\n    }\n\n    return resource;\n  }\n}\n\n/**\n * Returns an array of strings to external resources, we deliberately don't check\n * for javascript, since this is often added to the body. 
The body will be copied over\n *\n * @param {string} html\n */\nfunction getExternalResources(html: string) {\n  return html\n    .split(\"\\n\")\n    .map((line) => getCssResource(line) || getJsResource(line))\n    .filter((x) => x);\n}\n\n/**\n * Get all information in the body\n *\n * @param {string} html\n */\nfunction getBodyContent(html: string): string | undefined {\n  const bodyRegex = /<body>([^]*)<\\/body>/;\n\n  const match = html.match(bodyRegex);\n\n  if (match) {\n    return match[1];\n  }\n}\n\n/**\n * Parses the html for external resources and body\n *\n * @export\n * @param {string} html\n */\nexport default function parseHTML(html: string) {\n  const externalResources = getExternalResources(html);\n  const bodyContent = getBodyContent(html);\n\n  return {\n    body: bodyContent || '<div id=\"root\"></div>',\n    externalResources,\n  };\n}\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/index.ts",
    "content": "import {\n  INormalizedModules,\n  IModule,\n  ISandbox,\n  ITemplate,\n} from \"codesandbox-import-util-types\";\nimport denormalize from \"../utils/files/denormalize\";\nimport { parse as parseEnv } from \"envfile\";\n\nimport parseHTML from \"./html-parser\";\nimport { getMainFile, getTemplate } from \"./templates\";\n\ninterface IDependencies {\n  [name: string]: string;\n}\n\nfunction getHTMLInfo(html: IModule | undefined) {\n  if (!html) {\n    return { externalResources: [], file: null };\n  }\n\n  const { externalResources } = parseHTML(html.content);\n\n  return { externalResources, file: html };\n}\n\nfunction findMainFile(\n  directory: INormalizedModules,\n  mainFile: string,\n  template: ITemplate\n) {\n  if (directory[mainFile]) {\n    return mainFile;\n  }\n  if (directory[getMainFile(template)]) {\n    return getMainFile(template);\n  }\n  if (directory[\"src/index.js\"]) {\n    return \"src/index.js\";\n  }\n  if (directory[\"index.js\"]) {\n    return \"index.js\";\n  }\n\n  return mainFile || getMainFile(template);\n}\n\nconst CLOUD_TEMPLATES = [\n  \"adonis\",\n  \"vue-cli\",\n  \"svelte\",\n  \"angular-cli\",\n  \"cxjs\",\n  \"gatsby\",\n  \"nuxt\",\n  \"next\",\n  \"reason\",\n  \"apollo\",\n  \"sapper\",\n  \"ember\",\n  \"nest\",\n  \"styleguidist\",\n  \"gridsome\",\n  \"vuepress\",\n  \"mdx-deck\",\n  \"quasar\",\n  \"docusaurus\",\n  \"remix\",\n  \"node\",\n];\nfunction isCloudTemplate(template: ITemplate): boolean {\n  return CLOUD_TEMPLATES.indexOf(template) > -1;\n}\n\nfunction getSandboxMetadata(directory: INormalizedModules): {\n  title: string;\n  description: string;\n  tags: string[];\n  iconUrl?: string;\n} {\n  const packageJson = directory[\"package.json\"];\n  if (packageJson && packageJson.type === \"directory\") {\n    throw new Error(\"package.json is a directory\");\n  }\n\n  let packageJsonPackage = packageJson ? 
JSON.parse(packageJson.content) : null;\n\n  const packageJsonInfo = {\n    title: packageJsonPackage?.title || packageJsonPackage?.name,\n    description: packageJsonPackage?.description,\n    tags: packageJsonPackage?.keywords || [],\n    iconUrl: packageJsonPackage?.iconUrl,\n  };\n\n  const templateInfo = directory[\".codesandbox/template.json\"];\n  if (templateInfo && \"content\" in templateInfo) {\n    const content = JSON.parse(templateInfo.content);\n\n    return {\n      title: content.title || packageJsonInfo.title,\n      description: content.description || packageJsonInfo.description,\n      tags: content.tags || packageJsonInfo.tags,\n      iconUrl: content.iconUrl || packageJsonInfo.iconUrl,\n    };\n  }\n\n  return packageJsonInfo;\n}\n\n/**\n * Gets the prefilled environment variables by parsing either /.env.example\n * or /.env.\n */\nfunction getEnvironmentVariables(directory: INormalizedModules) {\n  const envFile = directory[\".env\"] || directory[\".env.example\"];\n\n  if (!envFile || envFile.type !== \"file\") {\n    return {};\n  }\n\n  return parseEnv(envFile.content);\n}\n\n/**\n * Creates all relevant data for create a sandbox, like dependencies and which\n * files are in a sandbox\n *\n * @export SandboxObject\n * @param {Array<Module>} files\n * @param {Array<Module>} directories\n */\nexport default async function createSandbox(\n  directory: INormalizedModules\n): Promise<ISandbox> {\n  const packageJson = directory[\"package.json\"];\n  if (packageJson && packageJson.type === \"directory\") {\n    throw new Error(\"package.json is a directory\");\n  }\n\n  let packageJsonPackage = packageJson ? 
JSON.parse(packageJson.content) : null;\n  let template = getTemplate(packageJsonPackage, directory);\n\n  if (template === undefined) {\n    console.log(\"Got undefined template, defaulting to 'create-react-app'\");\n\n    template = \"create-react-app\";\n  } else {\n    console.log(`Creating sandbox with template '${template}'`);\n  }\n\n  packageJsonPackage = packageJsonPackage || { main: \"/index.html\" };\n\n  const mainFileUnix = findMainFile(\n    directory,\n    packageJsonPackage.main,\n    template\n  );\n  const mainFile =\n    process.platform === \"win32\"\n      ? mainFileUnix.replace(/\\//g, \"\\\\\")\n      : mainFileUnix;\n\n  // Give the sandboxModules to getDependencies to fetch which devDependencies\n  // are used in the code\n  const metadata = getSandboxMetadata(directory);\n\n  const { modules, directories } = denormalize(directory);\n\n  return {\n    title: metadata.title,\n    description: metadata.description,\n    tags: metadata.tags,\n    modules,\n    directories,\n    externalResources: [],\n    environmentVariables: getEnvironmentVariables(directory),\n    template,\n    entry: mainFile,\n    v2: isCloudTemplate(template),\n    templateParams: metadata.iconUrl\n      ? {\n          iconUrl: metadata.iconUrl,\n        }\n      : undefined,\n  };\n}\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/templates.ts",
    "content": "import { INormalizedModules, ITemplate } from \"codesandbox-import-util-types\";\n\nexport function getMainFile(template: ITemplate) {\n  switch (template) {\n    case \"adonis\":\n      return \"server.js\";\n    case \"vue-cli\":\n      return \"src/main.js\";\n    case \"angular-cli\":\n      return \"src/main.ts\";\n    case \"create-react-app-typescript\":\n      return \"src/main.tsx\";\n    case \"parcel\":\n    case \"static\":\n      return \"index.html\";\n    case \"gatsby\":\n      return \"src/pages/index.js\";\n    case \"gridsome\":\n      return \"src/pages/Index.vue\";\n    case \"mdx-deck\":\n      return \"deck.mdx\";\n    case \"quasar\":\n      return \"src/pages/Index.vue\";\n\n    case \"styleguidist\":\n    case \"nuxt\":\n    case \"next\":\n    case \"apollo\":\n    case \"reason\":\n    case \"sapper\":\n    case \"nest\":\n    case \"remix\":\n    case \"vuepress\":\n    case \"styleguidist\":\n      return \"package.json\";\n    default:\n      return \"src/index.js\";\n  }\n}\n\nconst SANDBOX_CONFIG = \"sandbox.config.json\";\nconst TEMPLATE_CONFIG = \".codesandbox/template.json\";\nconst MAX_CLIENT_DEPENDENCY_COUNT = 50;\n\ntype Dependencies = { [name: string]: string };\ntype PackageJSON = {\n  dependencies?: Dependencies;\n  devDependencies?: Dependencies;\n};\nexport function getTemplate(\n  pkg: PackageJSON | null,\n  modules: INormalizedModules\n): ITemplate | undefined {\n  const sandboxConfig =\n    modules[SANDBOX_CONFIG] || modules[`/${SANDBOX_CONFIG}`];\n  if (sandboxConfig && sandboxConfig.type !== \"directory\") {\n    try {\n      const config = JSON.parse(sandboxConfig.content);\n\n      if (config.template) {\n        return config.template;\n      }\n    } catch (e) {}\n  }\n\n  const templateConfig =\n    modules[TEMPLATE_CONFIG] || modules[`/${TEMPLATE_CONFIG}`];\n  if (templateConfig && templateConfig.type !== \"directory\") {\n    try {\n      const config = JSON.parse(templateConfig.content);\n\n   
   if (config.runtime) {\n        return config.runtime;\n      }\n    } catch (e) {}\n  }\n\n  if (\n    \".codesandbox/Dockerfile\" in modules ||\n    \".devcontainer/devcontainer.json\" in modules\n  ) {\n    // We should return \"cloud\" here, once the server supports it.\n    return \"node\";\n  }\n\n  if (!pkg) {\n    return \"static\";\n  }\n\n  const { dependencies = {}, devDependencies = {} } = pkg;\n\n  const totalDependencies = [\n    ...Object.keys(dependencies),\n    ...Object.keys(devDependencies),\n  ];\n  const moduleNames = Object.keys(modules);\n\n  const adonis = [\"@adonisjs/framework\", \"@adonisjs/core\"];\n\n  if (totalDependencies.some((dep) => adonis.indexOf(dep) > -1)) {\n    return \"adonis\";\n  }\n\n  const nuxt = [\"nuxt\", \"nuxt-edge\", \"nuxt-ts\", \"nuxt-ts-edge\", \"nuxt3\"];\n\n  if (totalDependencies.some((dep) => nuxt.indexOf(dep) > -1)) {\n    return \"nuxt\";\n  }\n\n  if (totalDependencies.indexOf(\"next\") > -1) {\n    return \"next\";\n  }\n\n  const apollo = [\n    \"apollo-server\",\n    \"apollo-server-express\",\n    \"apollo-server-hapi\",\n    \"apollo-server-koa\",\n    \"apollo-server-lambda\",\n    \"apollo-server-micro\",\n  ];\n\n  if (totalDependencies.some((dep) => apollo.indexOf(dep) > -1)) {\n    return \"apollo\";\n  }\n\n  if (totalDependencies.indexOf(\"mdx-deck\") > -1) {\n    return \"mdx-deck\";\n  }\n\n  if (totalDependencies.indexOf(\"gridsome\") > -1) {\n    return \"gridsome\";\n  }\n\n  if (totalDependencies.indexOf(\"vuepress\") > -1) {\n    return \"vuepress\";\n  }\n\n  if (totalDependencies.indexOf(\"ember-cli\") > -1) {\n    return \"ember\";\n  }\n\n  if (totalDependencies.indexOf(\"sapper\") > -1) {\n    return \"sapper\";\n  }\n\n  if (totalDependencies.indexOf(\"gatsby\") > -1) {\n    return \"gatsby\";\n  }\n\n  if (totalDependencies.indexOf(\"quasar\") > -1) {\n    return \"quasar\";\n  }\n\n  if (totalDependencies.indexOf(\"@docusaurus/core\") > -1) {\n    return \"docusaurus\";\n  
}\n\n  if (totalDependencies.indexOf(\"remix\") > -1) {\n    return \"remix\";\n  }\n\n  if (totalDependencies.indexOf(\"astro\") > -1) {\n    return \"node\";\n  }\n\n  if (totalDependencies.indexOf(\"vite\") > -1) {\n    if (totalDependencies.indexOf(\"react-redux\") > -1) {\n      // Pretty bad hack to ensure that the examples of Redux\n      // still run in the old embed: https://github.com/codesandbox/codesandbox-client/issues/8282\n      //\n      // We should remove this once either:\n      // 1. the existing embed works with VMs\n      // 2. our new embeds support all query params\n      return \"create-react-app\";\n    }\n\n    return \"node\";\n  }\n\n  const tanstackDependencies = [\n    \"@tanstack/start\",\n    \"@tanstack/solid-start\",\n    \"@tanstack/react-start\",\n  ];\n  if (totalDependencies.some((dep) => tanstackDependencies.indexOf(dep) > -1)) {\n    return \"node\";\n  }\n\n  if (totalDependencies.indexOf(\"vanjs-core\") > -1) {\n    return \"node\";\n  }\n\n  if (totalDependencies.indexOf(\"mini-van-plate\") > -1) {\n    return \"node\";\n  }\n\n  // CLIENT\n\n  if (moduleNames.some((m) => m.endsWith(\".re\"))) {\n    return \"reason\";\n  }\n\n  const parcel = [\"parcel-bundler\", \"parcel\"];\n  if (totalDependencies.some((dep) => parcel.indexOf(dep) > -1)) {\n    return \"parcel\";\n  }\n\n  const dojo = [\"@dojo/core\", \"@dojo/framework\"];\n  if (totalDependencies.some((dep) => dojo.indexOf(dep) > -1)) {\n    return \"@dojo/cli-create-app\";\n  }\n  if (\n    totalDependencies.indexOf(\"@nestjs/core\") > -1 ||\n    totalDependencies.indexOf(\"@nestjs/common\") > -1\n  ) {\n    return \"nest\";\n  }\n\n  if (totalDependencies.indexOf(\"react-styleguidist\") > -1) {\n    return \"styleguidist\";\n  }\n\n  if (\n    totalDependencies.some((dependency) =>\n      /^(@[\\w-]+\\/)?react-scripts$/.test(dependency)\n    )\n  ) {\n    return \"create-react-app\";\n  }\n\n  if (totalDependencies.indexOf(\"react-scripts-ts\") > -1) {\n    return 
\"create-react-app-typescript\";\n  }\n\n  if (totalDependencies.indexOf(\"@angular/core\") > -1) {\n    return \"angular-cli\";\n  }\n\n  if (totalDependencies.indexOf(\"preact-cli\") > -1) {\n    return \"preact-cli\";\n  }\n\n  if (\n    totalDependencies.indexOf(\"@sveltech/routify\") > -1 ||\n    totalDependencies.indexOf(\"@roxi/routify\") > -1\n  ) {\n    return \"node\";\n  }\n\n  if (totalDependencies.indexOf(\"@frontity/core\") > -1) {\n    return \"node\";\n  }\n\n  if (totalDependencies.indexOf(\"svelte\") > -1) {\n    return \"svelte\";\n  }\n\n  if (totalDependencies.indexOf(\"vue\") > -1) {\n    return \"vue-cli\";\n  }\n\n  if (totalDependencies.indexOf(\"cx\") > -1) {\n    return \"cxjs\";\n  }\n\n  const nodeDeps = [\n    \"express\",\n    \"koa\",\n    \"nodemon\",\n    \"ts-node\",\n    \"@tensorflow/tfjs-node\",\n    \"webpack-dev-server\",\n    \"snowpack\",\n  ];\n  if (totalDependencies.some((dep) => nodeDeps.indexOf(dep) > -1)) {\n    return \"node\";\n  }\n\n  if (Object.keys(dependencies).length >= MAX_CLIENT_DEPENDENCY_COUNT) {\n    // The dependencies are too much for client sandboxes to handle\n    return \"node\";\n  }\n\n  return undefined;\n}\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/utils/__tests__/__snapshots__/resolve.test.ts.snap",
    "content": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n\nexports[`resolve getDirectoryPaths can resolve dir paths 1`] = `\n{\n  \"/world\": {\n    \"directoryShortid\": undefined,\n    \"shortid\": \"dir1\",\n    \"title\": \"world\",\n  },\n  \"/world/hello\": {\n    \"directoryShortid\": \"dir1\",\n    \"shortid\": \"dir2\",\n    \"title\": \"hello\",\n  },\n}\n`;\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/utils/__tests__/extract-requires.test.ts",
    "content": "import extractRequires from \"../extract-requires\";\n\ndescribe(\"extractRequires\", () => {\n  it(\"can find simple requires\", () => {\n    const code = `\n      import React from 'react';\n    `;\n\n    expect(extractRequires(code)).toEqual([\"react\"]);\n  });\n\n  it(\"can find require statements\", () => {\n    const code = `\n      const react = require('react');\n    `;\n\n    expect(extractRequires(code)).toEqual([\"react\"]);\n  });\n\n  it(\"can find dynamic require statements\", () => {\n    const code = `\n    const react = import('react');\n  `;\n\n    expect(extractRequires(code)).toEqual([\"react\"]);\n  });\n\n  it(\"can find multiple statements\", () => {\n    const code = `\n      import angular from 'angular';\n      import test from './test';\n      const react = import('react');\n\n      function run() {\n        const a = require('./test2');\n      }\n    `;\n\n    expect(extractRequires(code)).toEqual([\n      \"angular\",\n      \"./test\",\n      \"react\",\n      \"./test2\",\n    ]);\n  });\n\n  it(\"can find import promises\", () => {\n    const code = `\n      const reactDom = import('react-dom').then(dom => dom.render('a'));\n    `;\n\n    expect(extractRequires(code)).toEqual([\"react-dom\"]);\n  });\n});\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/utils/__tests__/resolve.test.ts",
    "content": "import { getDirectoryPaths } from \"../resolve\";\n\ndescribe(\"resolve\", () => {\n  describe(\"getDirectoryPaths\", () => {\n    it(\"can resolve dir paths\", () => {\n      const existingDirs = [\n        {\n          directoryShortid: undefined,\n          title: \"world\",\n          shortid: \"dir1\",\n        },\n        {\n          directoryShortid: \"dir1\",\n          title: \"hello\",\n          shortid: \"dir2\",\n        },\n      ];\n\n      expect(getDirectoryPaths(existingDirs)).toMatchSnapshot();\n    });\n  });\n});\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/utils/extract-requires.ts",
    "content": "import * as acorn from \"acorn\";\nimport * as babel from \"@babel/core\";\nimport traverse from \"@babel/traverse\";\nimport { ImportDeclaration, CallExpression, Literal } from \"estree\";\nconst walk = require(\"acorn/dist/walk\");\n\nrequire(\"acorn-dynamic-import/lib/inject\").default(acorn);\nrequire(\"acorn-jsx/inject\")(acorn);\nrequire(\"acorn-object-spread/inject\")(acorn);\n\nconst ECMA_VERSION = 2017;\n\nconst config = {\n  presets: [require(\"babel-preset-env\"), require(\"babel-preset-react\")],\n  plugins: [\n    require(\"babel-plugin-transform-async-to-generator\"),\n    require(\"babel-plugin-transform-object-rest-spread\"),\n    require(\"babel-plugin-transform-class-properties\"),\n    require(\"babel-plugin-transform-decorators-legacy\").default,\n    require(\"babel-plugin-dynamic-import-node\").default,\n  ],\n};\n\nexport default function exportRequires(code: string) {\n  const requires: string[] = [];\n  try {\n    const { ast } = babel.transformSync(code, config)!;\n\n    if (ast) {\n      traverse(ast, {\n        enter(path: any) {\n          if (\n            path.node.type === \"CallExpression\" &&\n            path.node.callee.name === \"require\" &&\n            path.node.arguments[0]\n          ) {\n            if (path.node.arguments[0].type === \"StringLiteral\") {\n              requires.push(path.node.arguments[0].value);\n            }\n          }\n        },\n      });\n    }\n  } catch (e) {\n    console.error(e);\n  }\n\n  return requires;\n}\n"
  },
  {
    "path": "packages/import-utils/src/create-sandbox/utils/resolve.ts",
    "content": "import { ISandboxDirectory } from \"codesandbox-import-util-types\";\n\nexport function getDirectoryPaths(directories: ISandboxDirectory[]) {\n  let paths: { [p: string]: ISandboxDirectory } = {};\n\n  const addDirectory = (\n    prevPath: string,\n    directoryShortid: string | undefined\n  ) => {\n    const dirs = directories.filter(\n      (d) => d.directoryShortid === directoryShortid\n    );\n\n    dirs.forEach((dir) => {\n      const dirPath = prevPath + \"/\" + dir.title;\n      paths[dirPath] = dir;\n      addDirectory(dirPath, dir.shortid);\n    });\n  };\n\n  directories\n    .filter((x) => x.directoryShortid == null)\n    .forEach((dir) => {\n      paths[\"/\" + dir.title] = dir;\n\n      addDirectory(\"/\" + dir.title, dir.shortid);\n    });\n\n  return paths;\n}\n"
  },
  {
    "path": "packages/import-utils/src/index.ts",
    "content": "// stub\n"
  },
  {
    "path": "packages/import-utils/src/is-text.ts",
    "content": "import { isText as _isText } from \"istextorbinary\";\n\nconst jsRegex = /(t|j)sx?$/i;\n\nconst FILE_LOADER_REGEX =\n  /\\.(ico|jpg|png|gif|eot|otf|webp|ttf|woff|woff2|mp4|webm)(\\?.*)?$/i;\nexport const MAX_FILE_SIZE = 3 * 1024 * 1024; // 3 MB\n\nexport const isText = (filename: string, buffer: Buffer) => {\n  if (jsRegex.test(filename)) {\n    return true;\n  }\n\n  // We don't support null bytes in the database with postgres,\n  // so we need to mark it as binary if there are null bytes\n  const hasNullByte = buffer.toString().includes(\"\\0\");\n  return (\n    _isText(filename, buffer) &&\n    !FILE_LOADER_REGEX.test(filename) &&\n    !isTooBig(buffer) &&\n    !hasNullByte\n  );\n};\n\nexport const isTooBig = (buffer: Buffer) => {\n  return buffer.length > MAX_FILE_SIZE;\n};\n"
  },
  {
    "path": "packages/import-utils/src/utils/files/__tests__/__snapshots__/denormalize.test.ts.snap",
    "content": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n\nexports[`denormalize can create nested directories 1`] = `\n{\n  \"directories\": [\n    {\n      \"directoryShortid\": \"GXOoy\",\n      \"shortid\": \"0\",\n      \"title\": \"test\",\n    },\n  ],\n  \"modules\": [\n    {\n      \"code\": \"\",\n      \"directoryShortid\": \"0\",\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"1\",\n      \"title\": \"new-file.js\",\n      \"uploadId\": undefined,\n    },\n  ],\n}\n`;\n\nexports[`denormalize can create only directory 1`] = `\n{\n  \"directories\": [\n    {\n      \"directoryShortid\": \"GXOoy\",\n      \"shortid\": \"0\",\n      \"title\": \"test\",\n    },\n    {\n      \"directoryShortid\": \"0\",\n      \"shortid\": \"1\",\n      \"title\": \"test2\",\n    },\n  ],\n  \"modules\": [],\n}\n`;\n\nexports[`denormalize can denormalize 1`] = `\n{\n  \"directories\": [\n    {\n      \"directoryShortid\": undefined,\n      \"shortid\": \"0\",\n      \"title\": \"world\",\n    },\n    {\n      \"directoryShortid\": \"0\",\n      \"shortid\": \"1\",\n      \"title\": \"hello\",\n    },\n  ],\n  \"modules\": [\n    {\n      \"code\": \"hello\",\n      \"directoryShortid\": undefined,\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"2\",\n      \"title\": \"index.js\",\n      \"uploadId\": undefined,\n    },\n    {\n      \"code\": \"hello2\",\n      \"directoryShortid\": \"0\",\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"3\",\n      \"title\": \"index.js\",\n      \"uploadId\": undefined,\n    },\n    {\n      \"code\": \"hello3\",\n      \"directoryShortid\": \"1\",\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"4\",\n      \"title\": \"index.js\",\n      \"uploadId\": \"123\",\n    },\n  ],\n}\n`;\n\nexports[`denormalize can filter out existing directories 1`] = `\n{\n  \"directories\": [],\n  \"modules\": [\n    {\n      \"code\": 
\"hello\",\n      \"directoryShortid\": undefined,\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"0\",\n      \"title\": \"index.js\",\n      \"uploadId\": undefined,\n    },\n    {\n      \"code\": \"hello2\",\n      \"directoryShortid\": \"dir1\",\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"1\",\n      \"title\": \"index.js\",\n      \"uploadId\": undefined,\n    },\n    {\n      \"code\": \"hello3\",\n      \"directoryShortid\": \"dir2\",\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"2\",\n      \"title\": \"index.js\",\n      \"uploadId\": undefined,\n    },\n    {\n      \"code\": \"hello4\",\n      \"directoryShortid\": \"dir4\",\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"3\",\n      \"title\": \"template.md\",\n      \"uploadId\": undefined,\n    },\n    {\n      \"code\": \"hello5\",\n      \"directoryShortid\": \"dir5\",\n      \"isBinary\": false,\n      \"sha\": undefined,\n      \"shortid\": \"4\",\n      \"title\": \"config.yml\",\n      \"uploadId\": undefined,\n    },\n  ],\n}\n`;\n"
  },
  {
    "path": "packages/import-utils/src/utils/files/__tests__/denormalize.test.ts",
    "content": "import denormalize from \"../denormalize\";\n\nlet count = 0;\n\njest.mock(\"shortid\", () => ({\n  generate: () => \"\" + count++,\n}));\n\ndescribe(\"denormalize\", () => {\n  beforeEach(() => {\n    count = 0;\n  });\n\n  it(\"can denormalize\", () => {\n    const paths = {\n      \"/index.js\": { content: \"hello\", isBinary: false },\n      \"/world/index.js\": { content: \"hello2\", isBinary: false },\n      \"/world/hello/index.js\": {\n        content: \"hello3\",\n        isBinary: false,\n        uploadId: \"123\",\n      },\n    };\n\n    expect(denormalize(paths)).toMatchSnapshot();\n  });\n\n  it(\"can denormalize with and without leading slash\", () => {\n    const paths = {\n      \"index.js\": { content: \"hello\", isBinary: false },\n      \"world/index.js\": { content: \"hello2\", isBinary: false },\n      \"world/hello/index.js\": { content: \"hello3\", isBinary: false },\n    };\n\n    const slashPaths = {\n      \"/index.js\": { content: \"hello\", isBinary: false },\n      \"/world/index.js\": { content: \"hello2\", isBinary: false },\n      \"/world/hello/index.js\": { content: \"hello3\", isBinary: false },\n    };\n\n    const firstDenormalize = denormalize(paths);\n    count = 0;\n\n    const secondDenormalize = denormalize(slashPaths);\n\n    expect(firstDenormalize).toEqual(secondDenormalize);\n  });\n\n  it(\"can filter out existing directories\", () => {\n    const paths = {\n      \"index.js\": { content: \"hello\", isBinary: false },\n      \"world/index.js\": { content: \"hello2\", isBinary: false },\n      \"world/hello/index.js\": { content: \"hello3\", isBinary: false },\n      \".github/ISSUE_TEMPLATES/template.md\": {\n        content: \"hello4\",\n        isBinary: false,\n      },\n      \".github/workflows/config.yml\": { content: \"hello5\", isBinary: false },\n    };\n\n    const existingDirs = [\n      {\n        directoryShortid: undefined,\n        title: \"world\",\n        shortid: \"dir1\",\n      
},\n      {\n        directoryShortid: \"dir1\",\n        title: \"hello\",\n        shortid: \"dir2\",\n      },\n      {\n        directoryShortid: undefined,\n        title: \".github\",\n        shortid: \"dir3\",\n      },\n      {\n        directoryShortid: \"dir3\",\n        title: \"ISSUE_TEMPLATES\",\n        shortid: \"dir4\",\n      },\n      {\n        directoryShortid: \"dir3\",\n        title: \"workflows\",\n        shortid: \"dir5\",\n      },\n    ];\n\n    const denormalized = denormalize(paths, existingDirs);\n\n    expect(denormalized).toMatchSnapshot();\n    expect(denormalized.directories).toEqual([]);\n  });\n\n  it(\"can create nested directories\", () => {\n    const paramFiles = {\n      \"/src/test/new-file.js\": { isBinary: false, content: \"\" },\n    };\n    const existingDirs = [\n      {\n        directoryShortid: null,\n        shortid: \"rgkK4\",\n        title: \"public\",\n      },\n      {\n        directoryShortid: null,\n        shortid: \"GXOoy\",\n        title: \"src\",\n      },\n    ];\n    const denormalized = denormalize(paramFiles, existingDirs);\n\n    expect(denormalized).toMatchSnapshot();\n  });\n\n  it(\"can create only directory\", () => {\n    const paramFiles: { \"/src/test/test2\": { type: \"directory\" } } = {\n      \"/src/test/test2\": { type: \"directory\" },\n    };\n    const existingDirs = [\n      {\n        directoryShortid: null,\n        shortid: \"rgkK4\",\n        title: \"public\",\n      },\n      {\n        directoryShortid: null,\n        shortid: \"GXOoy\",\n        title: \"src\",\n      },\n    ];\n    const denormalized = denormalize(paramFiles, existingDirs);\n\n    expect(denormalized).toMatchSnapshot();\n  });\n});\n"
  },
  {
    "path": "packages/import-utils/src/utils/files/denormalize.ts",
    "content": "import { dirname, basename } from \"path\";\nimport {\n  INormalizedModules,\n  IModule,\n  ISandboxFile,\n  ISandboxDirectory,\n  IBinaryModule,\n} from \"codesandbox-import-util-types\";\n\nimport { generate as generateShortid } from \"shortid\";\nimport { getDirectoryPaths } from \"../../create-sandbox/utils/resolve\";\n\nfunction generateSandboxFile(\n  module: IModule | IBinaryModule,\n  path: string,\n  parentDirectoryShortid?: string\n): ISandboxFile {\n  const sandboxFile: ISandboxFile = {\n    shortid: generateShortid(),\n    code: module.content,\n    directoryShortid: parentDirectoryShortid,\n    title: basename(path),\n    uploadId: module.uploadId,\n    isBinary: module.isBinary,\n    sha: module.sha,\n  };\n\n  if (\"binaryContent\" in module) {\n    sandboxFile.binaryContent = module.binaryContent;\n  }\n\n  return sandboxFile;\n}\n\nfunction createDirectoryRecursively(\n  path: string,\n  directories: { [path: string]: ISandboxDirectory }\n) {\n  if (directories[path]) {\n    return directories[path];\n  }\n\n  const parentDir = dirname(path);\n\n  // This means root, so create it\n  if (parentDir === \".\") {\n    directories[path] = generateSandboxDirectory(path, undefined);\n    return;\n  }\n\n  if (!directories[parentDir]) {\n    createDirectoryRecursively(parentDir, directories);\n  }\n\n  directories[path] = generateSandboxDirectory(\n    basename(path),\n    directories[parentDir].shortid\n  );\n}\n\nfunction generateSandboxDirectory(\n  title: string,\n  parentDirectoryShortid?: string\n): ISandboxDirectory {\n  return {\n    shortid: generateShortid(),\n    directoryShortid: parentDirectoryShortid,\n    title,\n  };\n}\n\nexport default function denormalize(\n  paramFiles: INormalizedModules,\n  existingDirs: ISandboxDirectory[] = []\n) {\n  const existingDirPathsParams = getDirectoryPaths(existingDirs);\n\n  // Remove all leading slashes\n  let existingDirPaths: {\n    [p: string]: ISandboxDirectory;\n  } = {};\n  
Object.keys(existingDirPathsParams).forEach((path) => {\n    existingDirPaths[path.replace(/^\\//, \"\")] = existingDirPathsParams[path];\n  });\n\n  let files: INormalizedModules = {};\n  Object.keys(paramFiles).forEach((path) => {\n    files[path.replace(/^\\//, \"\")] = paramFiles[path];\n  });\n\n  const directories: Set<string> = new Set();\n  Object.keys(files).forEach((path) => {\n    const dir = dirname(path);\n    if (dir !== \".\" && !existingDirPaths[\"/\" + dir]) {\n      directories.add(dirname(path));\n    }\n\n    const file = files[path];\n    if (file.type === \"directory\") {\n      directories.add(path);\n    }\n  });\n\n  const sandboxDirectories: {\n    [path: string]: ISandboxDirectory;\n  } = { ...existingDirPaths };\n  Array.from(directories).forEach((dirPath) => {\n    createDirectoryRecursively(dirPath, sandboxDirectories);\n  });\n\n  const sandboxModules: ISandboxFile[] = Object.keys(files)\n    .map((path) => {\n      const dir = sandboxDirectories[dirname(path)];\n      const parentShortid = dir ? dir.shortid : undefined;\n\n      const fileOrDirectory = files[path];\n\n      if (fileOrDirectory.type === \"directory\") {\n        return;\n      } else {\n        return generateSandboxFile(fileOrDirectory, path, parentShortid);\n      }\n    })\n    .filter((x): x is ISandboxFile => x !== undefined);\n\n  const dirs: unknown = Object.keys(sandboxDirectories)\n    .map((s) => !existingDirPaths[s] && sandboxDirectories[s])\n    .filter(Boolean);\n\n  return {\n    modules: sandboxModules,\n    directories: dirs as ISandboxDirectory[],\n  };\n}\n"
  },
  {
    "path": "packages/import-utils/src/utils/files/normalize.ts",
    "content": "import { join } from \"path\";\n\nimport {\n  ISandboxFile,\n  ISandboxDirectory,\n  INormalizedModules,\n} from \"codesandbox-import-util-types\";\n\nfunction findSandboxFiles(\n  modules: ISandboxFile[],\n  directories: ISandboxDirectory[],\n  currentDir: string | null,\n  path: string = \"\"\n): INormalizedModules {\n  let result: INormalizedModules = {};\n\n  const modulesInDirectory = modules.filter(\n    (m) => m.directoryShortid === currentDir\n  );\n\n  modulesInDirectory.forEach((m) => {\n    const newPath = join(path, m.title);\n\n    result[newPath] = { content: m.code || \"\", isBinary: m.isBinary };\n  });\n\n  const childrenFiles = directories\n    .filter((d) => d.directoryShortid === currentDir)\n    .forEach((dir) => {\n      const newPath = join(path, dir.title);\n      const dirResult = findSandboxFiles(\n        modules,\n        directories,\n        dir.shortid,\n        newPath\n      );\n\n      result = { ...result, ...dirResult };\n    });\n\n  return result;\n}\n\nexport default function normalizeSandboxFiles(\n  modules: ISandboxFile[],\n  directories: ISandboxDirectory[]\n): INormalizedModules {\n  return findSandboxFiles(modules, directories, null);\n}\n"
  },
  {
    "path": "packages/import-utils/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    /* Basic Options */\n    \"target\": \"es5\" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,\n    \"module\": \"commonjs\" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,\n    \"lib\": [\n      \"es5\",\n      \"es2015\",\n      \"dom\"\n    ] /* Specify library files to be included in the compilation:  */,\n    // \"allowJs\": true,                       /* Allow javascript files to be compiled. */\n    // \"checkJs\": true,                       /* Report errors in .js files. */\n    // \"jsx\": \"preserve\",                     /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */\n    \"declaration\": true /* Generates corresponding '.d.ts' file. */,\n    \"sourceMap\": true /* Generates corresponding '.map' file. */,\n    \"declarationMap\": true,\n    // \"outFile\": \"./\",                       /* Concatenate and emit output to single file. */\n    \"outDir\": \"./lib\" /* Redirect output structure to the directory. */,\n    // \"rootDir\": \"./\",                       /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */\n    // \"removeComments\": true,                /* Do not emit comments to output. */\n    // \"noEmit\": true,                        /* Do not emit outputs. */\n    // \"importHelpers\": true,                 /* Import emit helpers from 'tslib'. */\n    // \"downlevelIteration\": true,            /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */\n    // \"isolatedModules\": true,               /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */\n    /* Strict Type-Checking Options */\n    \"strict\": true /* Enable all strict type-checking options. */,\n    // \"noImplicitAny\": true,                 /* Raise error on expressions and declarations with an implied 'any' type. */\n    // \"strictNullChecks\": true,              /* Enable strict null checks. */\n    // \"noImplicitThis\": true,                /* Raise error on 'this' expressions with an implied 'any' type. */\n    // \"alwaysStrict\": true,                  /* Parse in strict mode and emit \"use strict\" for each source file. */\n    /* Additional Checks */\n    // \"noUnusedLocals\": true,                /* Report errors on unused locals. */\n    // \"noUnusedParameters\": true,            /* Report errors on unused parameters. */\n    // \"noImplicitReturns\": true,             /* Report error when not all code paths in function return a value. */\n    // \"noFallthroughCasesInSwitch\": true,    /* Report errors for fallthrough cases in switch statement. */\n    /* Module Resolution Options */\n    // \"moduleResolution\": \"node\",            /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */\n    // \"baseUrl\": \"./\",                       /* Base directory to resolve non-absolute module names. */\n    // \"paths\": {},                           /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */\n    // \"rootDirs\": [],                        /* List of root folders whose combined content represents the structure of the project at runtime. */\n    // \"typeRoots\": []                        /* List of folders to include type definitions from. */\n    // \"types\": []                            /* Type declaration files to be included in compilation. */\n    // \"allowSyntheticDefaultImports\": true,  /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */\n    /* Source Map Options */\n    // \"sourceRoot\": \"./\",                    /* Specify the location where debugger should locate TypeScript files instead of source locations. */\n    // \"mapRoot\": \"./\",                       /* Specify the location where debugger should locate map files instead of generated locations. */\n    // \"inlineSourceMap\": true,               /* Emit a single file with source maps instead of having a separate file. */\n    // \"inlineSources\": true,                 /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */\n    /* Experimental Options */\n    // \"experimentalDecorators\": true,        /* Enables experimental support for ES7 decorators. */\n    // \"emitDecoratorMetadata\": true,         /* Enables experimental support for emitting type metadata for decorators. */\n    \"skipLibCheck\": true,\n    \"useUnknownInCatchVariables\": false\n  },\n  \"include\": [\"src/**/*.ts\"],\n  \"exclude\": [\n    \"node_modules\",\n    \"build\",\n    \"**/*.test.ts\",\n    \"temp\",\n    \"**/__mocks__\",\n    \"**/__tests__\"\n  ]\n}\n"
  },
  {
    "path": "packages/types/LICENSE",
    "content": "                   GNU LESSER GENERAL PUBLIC LICENSE\n                       Version 3, 29 June 2007\n\nCopyright (C) 2018 CodeSandbox BV. <https://codesandbox.io/>\nEveryone is permitted to copy and distribute verbatim copies\nof this license document, but changing it is not allowed.\n\nThis version of the GNU Lesser General Public License incorporates\nthe terms and conditions of version 3 of the GNU General Public\nLicense, supplemented by the additional permissions listed below.\n\n0.  Additional Definitions.\n\nAs used herein, \"this License\" refers to version 3 of the GNU Lesser\nGeneral Public License, and the \"GNU GPL\" refers to version 3 of the GNU\nGeneral Public License.\n\n\"The Library\" refers to a covered work governed by this License,\nother than an Application or a Combined Work as defined below.\n\nAn \"Application\" is any work that makes use of an interface provided\nby the Library, but which is not otherwise based on the Library.\nDefining a subclass of a class defined by the Library is deemed a mode\nof using an interface provided by the Library.\n\nA \"Combined Work\" is a work produced by combining or linking an\nApplication with the Library. The particular version of the Library\nwith which the Combined Work was made is also called the \"Linked\nVersion\".\n\nThe \"Minimal Corresponding Source\" for a Combined Work means the\nCorresponding Source for the Combined Work, excluding any source code\nfor portions of the Combined Work that, considered in isolation, are\nbased on the Application, and not on the Linked Version.\n\nThe \"Corresponding Application Code\" for a Combined Work means the\nobject code and/or source code for the Application, including any data\nand utility programs needed for reproducing the Combined Work from the\nApplication, but excluding the System Libraries of the Combined Work.\n\n1.  Exception to Section 3 of the GNU GPL.\n\nYou may convey a covered work under sections 3 and 4 of this License\nwithout being bound by section 3 of the GNU GPL.\n\n2.  Conveying Modified Versions.\n\nIf you modify a copy of the Library, and, in your modifications, a\nfacility refers to a function or data to be supplied by an Application\nthat uses the facility (other than as an argument passed when the\nfacility is invoked), then you may convey a copy of the modified\nversion:\n\na) under this License, provided that you make a good faith effort to\nensure that, in the event an Application does not supply the\nfunction or data, the facility still operates, and performs\nwhatever part of its purpose remains meaningful, or\n\nb) under the GNU GPL, with none of the additional permissions of\nthis License applicable to that copy.\n\n3.  Object Code Incorporating Material from Library Header Files.\n\nThe object code form of an Application may incorporate material from\na header file that is part of the Library. You may convey such object\ncode under terms of your choice, provided that, if the incorporated\nmaterial is not limited to numerical parameters, data structure\nlayouts and accessors, or small macros, inline functions and templates\n(ten or fewer lines in length), you do both of the following:\n\na) Give prominent notice with each copy of the object code that the\nLibrary is used in it and that the Library and its use are\ncovered by this License.\n\nb) Accompany the object code with a copy of the GNU GPL and this license\ndocument.\n\n4.  Combined Works.\n\nYou may convey a Combined Work under terms of your choice that,\ntaken together, effectively do not restrict modification of the\nportions of the Library contained in the Combined Work and reverse\nengineering for debugging such modifications, if you also do each of\nthe following:\n\na) Give prominent notice with each copy of the Combined Work that\nthe Library is used in it and that the Library and its use are\ncovered by this License.\n\nb) Accompany the Combined Work with a copy of the GNU GPL and this license\ndocument.\n\nc) For a Combined Work that displays copyright notices during\nexecution, include the copyright notice for the Library among\nthese notices, as well as a reference directing the user to the\ncopies of the GNU GPL and this license document.\n\nd) Do one of the following:\n\n       0) Convey the Minimal Corresponding Source under the terms of this\n       License, and the Corresponding Application Code in a form\n       suitable for, and under terms that permit, the user to\n       recombine or relink the Application with a modified version of\n       the Linked Version to produce a modified Combined Work, in the\n       manner specified by section 6 of the GNU GPL for conveying\n       Corresponding Source.\n\n       1) Use a suitable shared library mechanism for linking with the\n       Library.  A suitable mechanism is one that (a) uses at run time\n       a copy of the Library already present on the user's computer\n       system, and (b) will operate properly with a modified version\n       of the Library that is interface-compatible with the Linked\n       Version.\n\ne) Provide Installation Information, but only if you would otherwise\nbe required to provide such information under section 6 of the\nGNU GPL, and only to the extent that such information is\nnecessary to install and execute a modified version of the\nCombined Work produced by recombining or relinking the\nApplication with a modified version of the Linked Version. (If\nyou use option 4d0, the Installation Information must accompany\nthe Minimal Corresponding Source and Corresponding Application\nCode. If you use option 4d1, you must provide the Installation\nInformation in the manner specified by section 6 of the GNU GPL\nfor conveying Corresponding Source.)\n\n5.  Combined Libraries.\n\nYou may place library facilities that are a work based on the\nLibrary side by side in a single library together with other library\nfacilities that are not Applications and are not covered by this\nLicense, and convey such a combined library under terms of your\nchoice, if you do both of the following:\n\na) Accompany the combined library with a copy of the same work based\non the Library, uncombined with any other library facilities,\nconveyed under the terms of this License.\n\nb) Give prominent notice with the combined library that part of it\nis a work based on the Library, and explaining where to find the\naccompanying uncombined form of the same work.\n\n6.  Revised Versions of the GNU Lesser General Public License.\n\nThe Free Software Foundation may publish revised and/or new versions\nof the GNU Lesser General Public License from time to time. Such new\nversions will be similar in spirit to the present version, but may\ndiffer in detail to address new problems or concerns.\n\nEach version is given a distinguishing version number. If the\nLibrary as you received it specifies that a certain numbered version\nof the GNU Lesser General Public License \"or any later version\"\napplies to it, you have the option of following the terms and\nconditions either of that published version or of any later version\npublished by the Free Software Foundation. If the Library as you\nreceived it does not specify a version number of the GNU Lesser\nGeneral Public License, you may choose any version of the GNU Lesser\nGeneral Public License ever published by the Free Software Foundation.\n\nIf the Library as you received it specifies that a proxy can decide\nwhether future versions of the GNU Lesser General Public License shall\napply, that proxy's public statement of acceptance of any version is\npermanent authorization for you to choose that version for the\nLibrary.\n"
  },
  {
    "path": "packages/types/index.d.ts",
    "content": "export interface IModule {\n  content: string; // If isBinary is true this will be a URL\n  isBinary: boolean;\n  type?: \"file\";\n  uploadId?: string;\n  sha?: string;\n}\n\nexport interface IBinaryModule extends IModule {\n  binaryContent: string;\n}\n\nexport interface IDirectory {\n  type: \"directory\";\n}\n\nexport interface INormalizedModules {\n  [path: string]: IModule | IBinaryModule | IDirectory;\n}\n\nexport interface ISandboxFile {\n  title: string;\n  code: string;\n  shortid: string;\n  isBinary: boolean;\n  binaryContent?: string;\n  uploadId?: string;\n  directoryShortid: string | undefined | null;\n  sha?: string;\n}\n\nexport interface ISandboxDirectory {\n  shortid: string;\n  title: string;\n  directoryShortid: string | undefined | null;\n}\n\nexport type ITemplate =\n  | \"adonis\"\n  | \"vue-cli\"\n  | \"preact-cli\"\n  | \"svelte\"\n  | \"create-react-app-typescript\"\n  | \"create-react-app\"\n  | \"angular-cli\"\n  | \"parcel\"\n  | \"@dojo/cli-create-app\"\n  | \"cxjs\"\n  | \"gatsby\"\n  | \"nuxt\"\n  | \"next\"\n  | \"reason\"\n  | \"apollo\"\n  | \"sapper\"\n  | \"ember\"\n  | \"nest\"\n  | \"static\"\n  | \"styleguidist\"\n  | \"gridsome\"\n  | \"vuepress\"\n  | \"mdx-deck\"\n  | \"quasar\"\n  | \"docusaurus\"\n  | \"remix\"\n  | \"node\";\n\nexport interface ISandbox {\n  title: string;\n  description: string;\n  tags: string[];\n  modules: ISandboxFile[];\n  directories: ISandboxDirectory[];\n  externalResources: string[];\n  template: ITemplate;\n  entry: string;\n  environmentVariables: Record<string, string>;\n  v2?: boolean;\n  templateParams?: {\n    iconUrl?: string;\n  };\n}\n"
  },
  {
    "path": "packages/types/package.json",
    "content": "{\n  \"name\": \"codesandbox-import-util-types\",\n  \"version\": \"2.2.3\",\n  \"gitHead\": \"3cdcdea389d39f2a92be73dcb73496f68c8ada41\"\n}\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    /* Basic Options */\n    \"target\": \"es6\" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,\n    \"module\": \"commonjs\" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,\n    \"lib\": [\n      \"es2015\",\n      \"dom\"\n    ] /* Specify library files to be included in the compilation:  */,\n    // \"allowJs\": true,                       /* Allow javascript files to be compiled. */\n    // \"checkJs\": true,                       /* Report errors in .js files. */\n    // \"jsx\": \"preserve\",                     /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */\n    // \"declaration\": true,                   /* Generates corresponding '.d.ts' file. */\n    \"sourceMap\": true /* Generates corresponding '.map' file. */,\n    // \"outFile\": \"./\",                       /* Concatenate and emit output to single file. */\n    \"outDir\": \"./dist\" /* Redirect output structure to the directory. */,\n    // \"rootDir\": \"./\",                       /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */\n    // \"removeComments\": true,                /* Do not emit comments to output. */\n    // \"noEmit\": true,                        /* Do not emit outputs. */\n    // \"importHelpers\": true,                 /* Import emit helpers from 'tslib'. */\n    // \"downlevelIteration\": true,            /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */\n    // \"isolatedModules\": true,               /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */\n    /* Strict Type-Checking Options */\n    \"strict\": true /* Enable all strict type-checking options. */,\n    // \"noImplicitAny\": true,                 /* Raise error on expressions and declarations with an implied 'any' type. */\n    // \"strictNullChecks\": true,              /* Enable strict null checks. */\n    // \"noImplicitThis\": true,                /* Raise error on 'this' expressions with an implied 'any' type. */\n    // \"alwaysStrict\": true,                  /* Parse in strict mode and emit \"use strict\" for each source file. */\n    /* Additional Checks */\n    // \"noUnusedLocals\": true,                /* Report errors on unused locals. */\n    // \"noUnusedParameters\": true,            /* Report errors on unused parameters. */\n    // \"noImplicitReturns\": true,             /* Report error when not all code paths in function return a value. */\n    // \"noFallthroughCasesInSwitch\": true,    /* Report errors for fallthrough cases in switch statement. */\n    /* Module Resolution Options */\n    // \"moduleResolution\": \"node\",            /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */\n    // \"baseUrl\": \"./\",                       /* Base directory to resolve non-absolute module names. */\n    // \"paths\": {},                           /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */\n    // \"rootDirs\": [],                        /* List of root folders whose combined content represents the structure of the project at runtime. */\n    // \"typeRoots\": []                        /* List of folders to include type definitions from. */\n    // \"types\": []                            /* Type declaration files to be included in compilation. */\n    // \"allowSyntheticDefaultImports\": true,  /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */\n    /* Source Map Options */\n    // \"sourceRoot\": \"./\",                    /* Specify the location where debugger should locate TypeScript files instead of source locations. */\n    // \"mapRoot\": \"./\",                       /* Specify the location where debugger should locate map files instead of generated locations. */\n    // \"inlineSourceMap\": true,               /* Emit a single file with source maps instead of having a separate file. */\n    // \"inlineSources\": true,                 /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */\n    /* Experimental Options */\n    // \"experimentalDecorators\": true,        /* Enables experimental support for ES7 decorators. */\n    // \"emitDecoratorMetadata\": true,         /* Enables experimental support for emitting type metadata for decorators. */\n    \"skipLibCheck\": true,\n    \"useUnknownInCatchVariables\": false\n  },\n  \"include\": [\"src/**/*.ts\", \"typings/**/*.ts\"],\n  \"exclude\": [\"node_modules\", \"__tests__\", \"build\", \"**/*.test.ts\", \"temp\"]\n}\n"
  }
]