Repository: codesandbox/codesandbox-importers
Branch: master
Commit: d077bdf0d1a8
Files: 87
Total size: 159.8 KB
Directory structure:
gitextract__6b2bp8o/
├── .codesandbox/
│ └── tasks.json
├── .dockerignore
├── .eslintrc.js
├── .github/
│ └── workflows/
│ └── build-image.yml
├── .gitignore
├── .prettierrc
├── Dockerfile
├── LICENSE
├── catalog-info.yaml
├── lerna.json
├── package.json
├── packages/
│ ├── cli/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package.json
│ │ ├── src/
│ │ │ ├── api/
│ │ │ │ └── define.ts
│ │ │ ├── cfg.ts
│ │ │ ├── commands/
│ │ │ │ ├── deploy.ts
│ │ │ │ ├── login.ts
│ │ │ │ ├── logout.ts
│ │ │ │ └── token.ts
│ │ │ ├── github/
│ │ │ │ └── url.ts
│ │ │ ├── index.ts
│ │ │ └── utils/
│ │ │ ├── api.ts
│ │ │ ├── confirm.ts
│ │ │ ├── env.ts
│ │ │ ├── log.ts
│ │ │ ├── parse-sandbox/
│ │ │ │ ├── file-error.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── upload-files.ts
│ │ │ └── url.ts
│ │ ├── tsconfig.json
│ │ ├── tslint.json
│ │ └── typings/
│ │ └── extensions/
│ │ └── json.d.ts
│ ├── git-extractor/
│ │ ├── .gitignore
│ │ ├── config/
│ │ │ └── .gitkeep
│ │ ├── package.json
│ │ ├── src/
│ │ │ ├── index.ts
│ │ │ ├── middleware/
│ │ │ │ ├── appsignal.ts
│ │ │ │ ├── camelize.ts
│ │ │ │ ├── decamelize.ts
│ │ │ │ ├── error-handler.ts
│ │ │ │ ├── logger.ts
│ │ │ │ └── not-found.ts
│ │ │ ├── routes/
│ │ │ │ ├── define.test.ts
│ │ │ │ ├── define.ts
│ │ │ │ └── github/
│ │ │ │ ├── api.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── pull/
│ │ │ │ │ └── download.ts
│ │ │ │ ├── push/
│ │ │ │ │ ├── index.ts
│ │ │ │ │ └── utils/
│ │ │ │ │ ├── __tests__/
│ │ │ │ │ │ └── delta.test.ts
│ │ │ │ │ ├── create-blobs.ts
│ │ │ │ │ └── delta.ts
│ │ │ │ └── types.d.ts
│ │ │ └── utils/
│ │ │ ├── appsignal.ts
│ │ │ ├── delay.ts
│ │ │ ├── env.ts
│ │ │ └── log.ts
│ │ └── tsconfig.json
│ ├── hmaeo.yml
│ ├── import-utils/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── package.json
│ │ ├── src/
│ │ │ ├── api/
│ │ │ │ └── define.ts
│ │ │ ├── create-sandbox/
│ │ │ │ ├── __mocks__/
│ │ │ │ │ └── pacote.ts
│ │ │ │ ├── __tests__/
│ │ │ │ │ ├── __snapshots__/
│ │ │ │ │ │ └── html-parser.test.ts.snap
│ │ │ │ │ ├── html-parser.test.ts
│ │ │ │ │ └── templates.test.ts
│ │ │ │ ├── html-parser.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── templates.ts
│ │ │ │ └── utils/
│ │ │ │ ├── __tests__/
│ │ │ │ │ ├── __snapshots__/
│ │ │ │ │ │ └── resolve.test.ts.snap
│ │ │ │ │ ├── extract-requires.test.ts
│ │ │ │ │ └── resolve.test.ts
│ │ │ │ ├── extract-requires.ts
│ │ │ │ └── resolve.ts
│ │ │ ├── index.ts
│ │ │ ├── is-text.ts
│ │ │ └── utils/
│ │ │ └── files/
│ │ │ ├── __tests__/
│ │ │ │ ├── __snapshots__/
│ │ │ │ │ └── denormalize.test.ts.snap
│ │ │ │ └── denormalize.test.ts
│ │ │ ├── denormalize.ts
│ │ │ └── normalize.ts
│ │ └── tsconfig.json
│ └── types/
│ ├── LICENSE
│ ├── index.d.ts
│ └── package.json
└── tsconfig.json
================================================
FILE CONTENTS
================================================
================================================
FILE: .codesandbox/tasks.json
================================================
{
// These tasks will run in order when initializing your CodeSandbox project.
"setupTasks": [
{
"name": "Install Dependencies",
"command": "yarn install"
}
],
// These tasks can be run from CodeSandbox. Running one will open a log in the app.
"tasks": {
"build": {
"name": "build",
"command": "yarn build",
"runAtStart": false
},
"build:git-extractor": {
"name": "build:git-extractor",
"command": "yarn build:git-extractor",
"runAtStart": false
},
"build:dependencies": {
"name": "build:dependencies",
"command": "yarn build:dependencies",
"runAtStart": false
},
"test": {
"name": "test",
"command": "yarn test",
"runAtStart": false
},
"start": {
"name": "start",
"command": "yarn start",
"runAtStart": false
},
"dev": {
"name": "dev",
"command": "yarn dev",
"runAtStart": true,
"preview": {
"port": 2000
}
}
}
}
================================================
FILE: .dockerignore
================================================
.git
.gitignore
README.md
docker-compose.yml
node_modules
Dockerfile
dist
# Ignore generated credentials from google-github-actions/auth
gha-creds-*.json
================================================
FILE: .eslintrc.js
================================================
module.exports = {
extends: 'airbnb',
plugins: ['react', 'jest'],
};
================================================
FILE: .github/workflows/build-image.yml
================================================
name: ci
on:
push:
branches:
- "master"
jobs:
docker:
runs-on: ubuntu-latest
permissions:
contents: "read"
id-token: "write"
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: docker
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: codesandbox/importers
tags: |
type=sha,prefix=
- uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
================================================
FILE: .gitignore
================================================
.DS_Store
node_modules
npm-debug.log
yarn-error.log
build
public
temp
jest
.aws
dist
lerna-debug.log
================================================
FILE: .prettierrc
================================================
{}
================================================
FILE: Dockerfile
================================================
FROM node:19-alpine as build
WORKDIR /app
COPY . .
RUN yarn
RUN yarn build
USER node
CMD ["node", "./packages/git-extractor/dist/index.js"]
================================================
FILE: LICENSE
================================================
Copyright (c) Ives van Hoorne
This is an Open Source project licensed under the terms of
the GPLv3 license. Please see <http://www.gnu.org/licenses/gpl-3.0.html>
for license text.
================================================
FILE: catalog-info.yaml
================================================
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
name: github-importer
description: All importers & exporters for CodeSandbox
annotations:
github.com/project-slug: codesandbox/codesandbox-importers
backstage.io/kubernetes-namespace: default
backstage.io/kubernetes-label-selector: app.kubernetes.io/name=codesandbox,component=github-importers
codesandbox/deploy-image: codesandbox/importers
codesandbox/deploy-gitops-repo: codesandbox/codesandbox-gitops
codesandbox/deploy-gitops-yaml-key: "githubImporters.image.tag"
codesandbox/deploy-image-tag-regex-production: ".*"
codesandbox/deploy-gitops-file-production: codesandbox-core/codesandbox/production/helm-chart-values/values.yaml
codesandbox/deploy-image-tag-regex-staging: ".*"
codesandbox/deploy-gitops-file-staging: codesandbox-core/codesandbox/staging/helm-chart-values/values.yaml
codesandbox/deploy-gitops-version-type: tag
spec:
type: service
lifecycle: production
owner: infra
================================================
FILE: lerna.json
================================================
{
"lerna": "2.4.0",
"packages": ["packages/*"],
"version": "2.2.3",
"npmClient": "yarn"
}
================================================
FILE: package.json
================================================
{
"name": "codesandbox-importers",
"version": "1.0.0",
"description": "",
"private": true,
"scripts": {
"build": "yarn build:dependencies && lerna run build --scope codesandbox --scope git-converter --parallel",
"build:git-extractor": "yarn build:dependencies && lerna run build --scope git-converter",
"build:dependencies": "lerna run build --scope codesandbox-import-utils --scope codesandbox-import-util-types --stream",
"test": "lerna run test",
"start": "lerna run start --stream",
"dev": "lerna run dev --stream"
},
"author": "",
"devDependencies": {
"jest": "^29.6.2",
"lerna": "^7.1.5",
"prettier": "^2.2.1",
"rimraf": "^2.6.2",
"ts-jest": "^29.1.1",
"typescript": "^4.3.0"
},
"workspaces": [
"packages/cli",
"packages/git-extractor",
"packages/import-utils",
"packages/types"
]
}
================================================
FILE: packages/cli/.gitignore
================================================
lib
================================================
FILE: packages/cli/LICENSE
================================================
Copyright (c) Ives van Hoorne
This is an Open Source project licensed under the terms of
the GPLv3 license. Please see <http://www.gnu.org/licenses/gpl-3.0.html>
for license text.
================================================
FILE: packages/cli/README.md
================================================
# codesandbox-cli
> Upload your templates to codesandbox with a single command 🏖️
[](https://travis-ci.org/codesandbox/codesandbox-cli)
This is the command line interface for [CodeSandbox](https://codesandbox.io), an online editor
tailored for web applications.
## Quickstart
You can install the cli by running
```bash
# Install the cli
npm i -g codesandbox
# Go to your project
cd <path of your project>
# Deploy your project to CodeSandbox
codesandbox ./
```
## Future features
- Create a live connection with CodeSandbox using websockets so you can use your local editor
## Current limitations
- You need to be signed in to deploy, this is to prevent abuse
## Inspiration
I took a lot of inspiration from [now-cli](https://github.com/zeit/now-cli) and [preact-cli](https://github.com/developit/preact-cli) while building this.
================================================
FILE: packages/cli/package.json
================================================
{
"name": "codesandbox",
"version": "2.2.3",
"description": "The CLI used for communicating with CodeSandbox",
"main": "lib/index.js",
"bin": {
"codesandbox": "./lib/index.js"
},
"author": "Ives van Hoorne",
"license": "MIT",
"scripts": {
"test": "echo Done && exit 0",
"test:watch": "jest --watch",
"build": "rimraf lib && tsc -p tsconfig.json",
"watch": "tsc --watch -p tsconfig.json",
"prepublish": "yarn build"
},
"repository": "codesandbox/codesandbox-importers",
"files": [
"lib"
],
"keywords": [
"codesandbox",
"cli",
"editor"
],
"dependencies": {
"axios": "^1.6.0",
"chalk": "^2.4.1",
"codesandbox-import-util-types": "^2.2.3",
"codesandbox-import-utils": "^2.2.3",
"commander": "^2.9.0",
"datauri": "^3.0.0",
"filesize": "^3.6.1",
"fs-extra": "^3.0.1",
"git-branch": "^1.0.0",
"git-repo-name": "^0.6.0",
"git-username": "^0.5.0",
"humps": "^2.0.1",
"inquirer": "^8.2.4",
"lodash": "^4.17.5",
"lz-string": "^1.4.4",
"ms": "^2.0.0",
"open": "^6.3.0",
"ora": "^1.3.0",
"shortid": "^2.2.8",
"update-notifier": "^2.2.0"
},
"devDependencies": {
"@types/commander": "^2.9.1",
"@types/filesize": "^3.6.0",
"@types/fs-extra": "^3.0.3",
"@types/humps": "^1.1.2",
"@types/inquirer": "^0.0.35",
"@types/jest": "^20.0.2",
"@types/lodash": "^4.14.106",
"@types/lz-string": "^1.3.32",
"@types/ms": "^0.7.29",
"@types/node": "^14",
"@types/ora": "^0.3.31",
"@types/shortid": "^0.0.29",
"@types/update-notifier": "^1.0.1",
"filesize": "^3.6.1",
"rimraf": "^2.6.1",
"tslint": "^5.4.3",
"tslint-config-prettier": "^1.10.0"
},
"jest": {
"transform": {
".(ts|tsx)": "<rootDir>../../node_modules/ts-jest/preprocessor.js"
},
"testEnvironment": "node",
"moduleFileExtensions": [
"ts",
"tsx",
"js",
"json"
],
"testPathIgnorePatterns": [
"<rootDir>/node_modules/",
"<rootDir>/dist/"
],
"testRegex": "(/__tests__/.*|\\.(test|spec))\\.(ts|tsx|js)$"
},
"gitHead": "3cdcdea389d39f2a92be73dcb73496f68c8ada41"
}
================================================
FILE: packages/cli/src/api/define.ts
================================================
import { getParameters } from "codesandbox-import-utils/lib/api/define";
export { getParameters };
================================================
FILE: packages/cli/src/cfg.ts
================================================
import { homedir } from "os";
import * as fs from "fs-extra";
import * as path from "path";
import * as api from "./utils/api";
import { error } from "./utils/log";
import { IS_STAGING } from "./utils/env";
// tslint:disable no-var-requires
const ms = require("ms");
const TTL = ms("8h");
export interface IUser {
avatar_url: string;
email: string;
id: string;
name: string;
username: string;
jwt: string;
}
export interface IConfig {
[key: string]: any | undefined;
lastUpdate?: number;
user?: IUser;
}
const CONFIG_NAME = IS_STAGING
? ".codesandbox-staging.json"
: ".codesandbox.json";
const file = process.env.CODESANDBOX_JSON
? path.resolve(process.env.CODESANDBOX_JSON)
: path.resolve(homedir(), CONFIG_NAME);
/**
 * Persist the given data to the config file as pretty-printed JSON.
 *
 * @param {Object} data data to save
 */
async function save(data: object) {
  const serialized = JSON.stringify(data, null, 2);
  await fs.writeFile(file, serialized);
}
/**
 * Load and parse the config file.
 *
 * Returns an empty config when the file is missing/corrupt or when no token
 * is stored. When the cached user info is older than TTL, the user is
 * re-fetched from the API; if the token turns out to be invalid, the stored
 * credentials are deleted and an empty config is returned.
 */
export async function read(): Promise<IConfig> {
  let existing: IConfig = {};
  try {
    const fileData = await (fs.readFile(file, "utf8") as Promise<string>);
    existing = JSON.parse(fileData);
  } catch (err) {
    /* Do nothing: a missing or unreadable config simply means "not logged in" */
  }
  if (!existing.token) {
    return {};
  }
  if (!existing.lastUpdate || Date.now() - existing.lastUpdate > TTL) {
    const token = existing.token;
    try {
      const user = await api.fetchUser(token);
      if (user) {
        existing = { ...existing, user, lastUpdate: Date.now() };
        await save(existing);
      } else {
        // FIX: previously the stale config (including the now-invalid
        // token/user) was still returned after deleting the credentials.
        await deleteUser();
        return {};
      }
    } catch (e) {
      error("Could not authorize the user.");
      await deleteUser();
      // FIX: same as above — don't hand callers credentials we just deleted.
      return {};
    }
  }
  return existing;
}
// Removes a key from the config and store the result
export async function remove(key: string) {
const cfg = await read();
if (key in cfg) {
delete cfg[key];
}
await fs.writeFile(file, JSON.stringify(cfg, null, 2));
}
/**
 * Merge the given data into the current config, persist, and return the
 * merged result.
 * @param data
 */
export async function merge(data: object) {
  const current = await read();
  const updated = Object.assign({}, current, data);
  await save(updated);
  return updated;
}
/**
 * Remove all stored credentials by overwriting the config file with an
 * empty object.
 *
 * @export
 */
export async function deleteUser() {
  const emptyConfig = {};
  await save(emptyConfig);
}
/**
 * Store the given token and user in the config, stamping the time of the
 * update so the TTL logic in read() can refresh it later.
 *
 * @export
 * @param {string} token
 * @param {IUser} user
 * @returns the merged config
 */
export function saveUser(token: string, user: IUser) {
  const entry = { user, token, lastUpdate: Date.now() };
  return merge(entry);
}
/**
 * Read the currently stored user from the config, if any.
 *
 * @export
 * @returns the stored user, or undefined when not signed in
 */
export async function getUser(): Promise<IUser | undefined> {
  const { user } = await read();
  return user;
}
// Read the stored auth token from the config, if any.
export async function getToken(): Promise<string | undefined> {
  const { token } = await read();
  return token;
}
// Delete the config file itself from disk.
export const removeFile = async () => fs.remove(file);
================================================
FILE: packages/cli/src/commands/deploy.ts
================================================
import chalk from "chalk";
import * as Commander from "commander";
import * as inquirer from "inquirer";
import * as filesize from "filesize";
import createSandbox from "codesandbox-import-utils/lib/create-sandbox";
import { join } from "path";
import { getUser } from "../cfg";
import { uploadSandbox } from "../utils/api";
import confirm from "../utils/confirm";
import { error, info, log, success } from "../utils/log";
import { createSandboxUrl } from "../utils/url";
import { login } from "./login";
import parseSandbox, { IUploads } from "../utils/parse-sandbox";
import FileError from "../utils/parse-sandbox/file-error";
import uploadFiles from "../utils/parse-sandbox/upload-files";
// tslint:disable no-var-requires
const ora = require("ora");
const MAX_MODULE_COUNT = 500;
const MAX_DIRECTORY_COUNT = 500;
/**
 * Show warnings for the errors that occurred while mapping files; the user
 * is still given the option to continue the deployment without those files.
 *
 * @param {string} resolvedPath
 * @param {FileError[]} errors
 */
async function showWarnings(resolvedPath: string, errors: FileError[]) {
  if (errors.length === 0) {
    return;
  }
  console.log();
  log(
    chalk.yellow(
      `There are ${chalk.bold(
        errors.length.toString()
      )} files that cannot be deployed:`
    )
  );
  errors.forEach((err) => {
    const relativePath = err.path.replace(resolvedPath, "");
    log(`${chalk.yellow.bold(relativePath)}: ${err.message}`);
  });
  console.log();
}
/**
 * List the static files that will be uploaded to the user's CodeSandbox
 * upload storage, together with their sizes.
 *
 * @param {string} resolvedPath
 * @param {IUploads} uploads
 */
async function showUploads(resolvedPath: string, uploads: IUploads) {
  const paths = Object.keys(uploads);
  if (paths.length === 0) {
    return;
  }
  console.log();
  log(
    chalk.blue(
      `We will upload ${
        paths.length
      } static files to your CodeSandbox upload storage:`
    )
  );
  for (const path of paths) {
    const relativePath = path.replace(resolvedPath, "");
    log(
      `${chalk.yellow.bold(relativePath)}: ${filesize(
        uploads[path].byteLength
      )}`
    );
  }
  console.log();
}
/**
 * Register the default `deploy <path>` command: parses the local project,
 * confirms uploads/warnings with the user, and creates the sandbox via the
 * CodeSandbox API.
 */
export default function registerCommand(program: typeof Commander) {
  program
    .command("deploy <path>")
    .alias("*")
    .description(
      `deploy an application to CodeSandbox ${chalk.bold("(default)")}`
    )
    .action(async (path) => {
      const user = await getUser();

      if (!user) {
        info("You need to sign in before you can deploy applications");
        const confirmed = await confirm("Do you want to sign in using GitHub?");

        if (!confirmed) {
          return;
        }

        await login();
      }

      info(`Deploying ${path} to CodeSandbox`);

      try {
        let resolvedPath = join("./", path);
        // Normalize away a trailing slash so relative paths are computed
        // consistently below.
        if (resolvedPath.endsWith("/")) {
          resolvedPath = resolvedPath.slice(0, -1);
        }
        const fileData = await parseSandbox(resolvedPath);

        // Show files that will be uploaded
        await showUploads(resolvedPath, fileData.uploads);

        // Show warnings for all errors
        await showWarnings(resolvedPath, fileData.errors);

        info(
          "By deploying to CodeSandbox, the code of your project will be made " +
            chalk.bold("public")
        );

        const acceptPublic = await confirm(
          "Are you sure you want to proceed with the deployment?",
          true
        );
        if (!acceptPublic) {
          return;
        }

        let finalFiles = fileData.files;

        const spinner = ora("").start();
        try {
          if (Object.keys(fileData.uploads).length) {
            spinner.text = "Uploading files to CodeSandbox";
            const uploadedFiles = await uploadFiles(fileData.uploads);
            finalFiles = { ...finalFiles, ...uploadedFiles };
          }

          const sandbox = await createSandbox(finalFiles);

          if (sandbox.modules.length > MAX_MODULE_COUNT) {
            throw new Error(
              `This project is too big, it contains ${sandbox.modules.length} files which is more than the max of ${MAX_MODULE_COUNT}.`
            );
          }

          if (sandbox.directories.length > MAX_DIRECTORY_COUNT) {
            throw new Error(
              `This project is too big, it contains ${sandbox.directories.length} directories which is more than the max of ${MAX_DIRECTORY_COUNT}.`
            );
          }

          spinner.text = "Deploying to CodeSandbox";

          try {
            const sandboxData = await uploadSandbox(sandbox);
            spinner.stop();

            // FIX: message previously read "Succesfully".
            success(
              "Successfully created the sandbox, you can find the sandbox here:"
            );
            success(createSandboxUrl(sandboxData));
          } catch (e) {
            spinner.stop();

            error("Something went wrong while uploading to the API");
            error(e.message);
          }
        } catch (e) {
          // FIX: previously the spinner kept spinning when the size checks
          // or the upload/create steps threw, garbling terminal output.
          // Stop it before letting the outer handler report the error.
          spinner.stop();
          throw e;
        }
      } catch (e) {
        error(e.message);
      }
    });
}
================================================
FILE: packages/cli/src/commands/login.ts
================================================
import * as http from "http";
import * as inquirer from "inquirer";
import { omit } from "lodash";
import * as open from "open";
import ora = require("ora");
import * as cfg from "../cfg";
import * as api from "../utils/api";
import confirm from "../utils/confirm";
import { error, info } from "../utils/log";
import { LOGIN_URL as CLI_LOGIN_URL } from "../utils/url";
// TYPES
import * as Commander from "commander";
/**
 * Start the sign in process by opening CodeSandbox CLI login url, this page
 * will show a token that the user will have to fill in in the CLI
 *
 * @returns the verified user
 */
async function handleSignIn() {
  // Open the login page in the user's browser.
  info(`Opening ${CLI_LOGIN_URL}`);
  open(CLI_LOGIN_URL, { wait: false });

  const answers = await inquirer.prompt([
    {
      message: "Token:",
      name: "authToken",
      type: "input",
    },
  ]);
  const authToken = answers.authToken;

  // We got the token! Ask the server on authorization
  const spinner = ora("Fetching user...").start();

  try {
    const { token, user } = await api.verifyUser(authToken);

    // Save definite token and user to config
    spinner.text = "Saving user...";
    await cfg.saveUser(token, user);

    return user;
  } finally {
    // Stop the spinner on both the success and the error path.
    spinner.stop();
  }
}
/**
 * Interactive login flow: explains what will happen, asks for confirmation,
 * then runs the browser-based sign-in and reports the result.
 */
export async function login() {
  info("We will open CodeSandbox and show an authorization token.");
  // FIX: was "You'll need enter this token..." (missing "to").
  info("You'll need to enter this token in the CLI to sign in.");

  const confirmed = await confirm(
    "We will open CodeSandbox to finish the login process."
  );
  console.log();
  if (confirmed) {
    try {
      const user = await handleSignIn();
      // FIX: was "Succesfully".
      info(`Successfully signed in as ${user.username}!`);
    } catch (e) {
      error("Something went wrong while signing in: " + e.message);
    }
  }
}
/**
 * Register the `login` command on the CLI program.
 */
export default function registerCLI(program: typeof Commander) {
  program
    .command("login")
    .description("sign in to your CodeSandbox account or create a new one")
    .option("-s", "don't ask for sign in if you're already signed in")
    .action(async (cmd) => {
      const existingUser = await cfg.getUser();
      const silent = Boolean(cmd.S);

      if (existingUser) {
        // Already signed in: with -s do nothing, otherwise offer to sign
        // out first and bail if the user declines.
        if (silent) {
          return;
        }

        const confirmed = await confirm(
          "You are already logged in, would you like to sign out first?"
        );
        if (!confirmed) {
          return;
        }
        await cfg.deleteUser();
      }

      await login();
    });
}
================================================
FILE: packages/cli/src/commands/logout.ts
================================================
import * as Commander from "commander";
import { deleteUser, getUser } from "../cfg";
import confirm from "../utils/confirm";
import { error, info } from "../utils/log";
/**
 * Register the `logout` command: asks for confirmation, then removes the
 * stored credentials from the config file.
 */
export default function registerCLI(program: typeof Commander) {
  program
    .command("logout")
    .description("sign out from CodeSandbox")
    .action(async () => {
      const user = await getUser();

      if (user) {
        const confirmed = await confirm("Are you sure you want to log out?");

        if (confirmed) {
          await deleteUser();

          // FIX: message previously read "Succesfully logged out".
          info("Successfully logged out");
        }
      } else {
        error("You are already signed out");
      }
    });
}
================================================
FILE: packages/cli/src/commands/token.ts
================================================
import * as cfg from "../cfg";
// TYPES
import * as Commander from "commander";
/**
 * Register the `token` command, which prints the stored login token so it
 * can be piped into other tools. Exits with status 1 when not signed in.
 */
export default function registerToken(program: typeof Commander) {
  program
    .command("token")
    .description("get your login token to CodeSandbox")
    .action(async () => {
      const token = await cfg.getToken();
      if (typeof token === "undefined") {
        process.exit(1);
      }
      console.log(token);
    });
}
================================================
FILE: packages/cli/src/github/url.ts
================================================
import * as fs from "fs";
import { basename, dirname } from "path";
/* tslint:disable no-var-requires */
const branch = require("git-branch");
const username = require("git-username");
const repoName = require("git-repo-name");
/* tslint:enable */
export interface IOptions {
examplePath?: string;
openedModule?: string;
fontSize?: number;
highlightedLines?: number[];
editorSize?: number;
view?: "editor" | "preview";
hideNavigation?: boolean;
currentModuleView?: boolean;
autoResize?: boolean;
useCodeMirror?: boolean;
enableEslint?: boolean;
forceRefresh?: boolean;
expandDevTools?: boolean;
initialPath?: string;
gitRepo?: string;
gitUsername?: string;
gitBranch?: string;
}
/**
 * Serialize the truthy option values into a URL query string with keys
 * sorted alphabetically. Returns "" when nothing survives the filter.
 */
function optionsToParameterizedUrl(options: { [option: string]: any }) {
  const pairs: string[] = [];
  for (const key of Object.keys(options).sort()) {
    const value = options[key];
    if (value) {
      pairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);
    }
  }
  if (pairs.length === 0) {
    return "";
  }
  return `?${pairs.join("&")}`;
}
/**
 * Translate the high-level IOptions into the flat query-string parameters
 * understood by the CodeSandbox editor/embed pages.
 *
 * Falsy values are dropped later by optionsToParameterizedUrl, so flags are
 * only set when enabled. FIX: the original body assigned `initialpath` and
 * `expanddevtools` twice; the redundant duplicates are removed with no
 * change in the resulting URL.
 */
function getUrlOptions(options: IOptions) {
  const {
    view,
    autoResize,
    hideNavigation,
    currentModuleView,
    fontSize,
    initialPath,
    enableEslint,
    useCodeMirror,
    expandDevTools,
    forceRefresh,
    openedModule,
  } = options;

  const results: { [option: string]: any } = {};

  results.module = openedModule;
  results.view = view;
  results.initialpath = initialPath;

  if (autoResize) {
    results.autoresize = 1;
  }
  if (hideNavigation) {
    results.hidenavigation = 1;
  }
  if (currentModuleView) {
    results.moduleview = 1;
  }
  if (enableEslint) {
    results.eslint = 1;
  }
  if (expandDevTools) {
    results.expanddevtools = 1;
  }
  if (useCodeMirror) {
    results.codemirror = 1;
  }
  if (forceRefresh) {
    results.forcerefresh = 1;
  }
  // Only include a font size when it differs from the 14px default.
  if (fontSize !== 14) {
    results.fontsize = fontSize;
  }

  return optionsToParameterizedUrl(results);
}
const CODESANDBOX_ROOT = `https://codesandbox.io`;
/**
 * Walk upward from this file's directory until a directory containing a
 * `.git` entry is found. Throws when the filesystem root is reached
 * without finding one.
 */
function findGitRoot() {
  let currentPath = __dirname;

  while (currentPath !== "/") {
    const entries = fs.readdirSync(currentPath);
    if (entries.some((entry) => basename(entry) === ".git")) {
      return currentPath;
    }
    currentPath = dirname(currentPath);
  }

  throw new Error("Could not find .git folder");
}
/**
 * Derive the `username/repo/tree/branch[/path]` segment of a CodeSandbox
 * GitHub URL from the local git checkout (or from Netlify build metadata
 * when available), honoring explicit overrides via options.
 */
function getRepoPath(options: IOptions) {
  const gitPath = findGitRoot();

  let currentBranch;
  let currentUsername;
  const currentRepo = options.gitRepo || repoName.sync(gitPath);

  // Check whether the build is happening on Netlify
  if (process.env.REPOSITORY_URL) {
    // FIX: the previous pattern (`github.com[:|\/](.*)\/reactjs\.org`) only
    // matched the reactjs.org repository, and the `|` inside the character
    // class was a literal. This matches the owner of any GitHub repo URL
    // (both `github.com:owner/...` and `github.com/owner/...` forms).
    const usernameParts = process.env.REPOSITORY_URL.match(
      /github\.com[:\/]([^\/]+)\//
    );

    if (usernameParts) {
      currentUsername = usernameParts[1];
    }
    currentBranch = process.env.BRANCH;
  } else {
    currentBranch = branch.sync(gitPath);
    currentUsername = username(gitPath);
  }

  currentBranch = currentBranch || options.gitBranch;
  currentUsername = currentUsername || options.gitUsername;

  if (!currentBranch) {
    throw new Error("Could not fetch branch from the git info.");
  }
  if (!currentUsername) {
    throw new Error("Could not fetch username from the git info.");
  }
  if (!currentRepo) {
    throw new Error("Could not fetch repository from the git info.");
  }

  let path = `${currentUsername}/${currentRepo}/tree/${currentBranch}`;

  if (options.examplePath) {
    path += "/" + options.examplePath;
  }

  return path;
}
function getFullUrl(type: "s" | "embed", options: IOptions) {
const gitPath = getRepoPath(options);
const urlOptions = getUrlOptions(options);
return `${CODESANDBOX_ROOT}/${type}/github/${gitPath}${urlOptions}`;
}
// Public helper: editor ("s") URL for the current repository.
export function getSandboxUrl(options?: IOptions) {
  const opts = options || {};
  return getFullUrl("s", opts);
}
// Public helper: embed URL for the current repository.
export function getEmbedUrl(options?: IOptions) {
  const opts = options || {};
  return getFullUrl("embed", opts);
}
================================================
FILE: packages/cli/src/index.ts
================================================
#!/usr/bin/env node
import * as program from "commander";
import * as updateNotifier from "update-notifier";
// Commands
import deployCommand from "./commands/deploy";
import loginCommand from "./commands/login";
import logoutCommand from "./commands/logout";
import tokenCommand from "./commands/token";
import { extraHelp, logCodeSandbox } from "./utils/log";
// tslint:disable no-var-requires
const packageInfo = require("../package.json");
// Wire up the CLI: version flag and the extra help footer.
program.version(packageInfo.version);
program.on("--help", extraHelp);
// Register commands
deployCommand(program);
loginCommand(program);
tokenCommand(program);
logoutCommand(program);
program.parse(process.argv);
// With no arguments at all, print the banner and the generated help text.
if (!process.argv.slice(2).length) {
  console.log();
  logCodeSandbox();
  console.log();
  program.outputHelp();
}
// Nudge the user when a newer version of the CLI has been published.
updateNotifier({ pkg: packageInfo }).notify();
================================================
FILE: packages/cli/src/utils/api.ts
================================================
import axios, { AxiosRequestConfig } from "axios";
import { ISandbox } from "codesandbox-import-util-types";
import { values } from "lodash";
import { decamelizeKeys } from "humps";
import { getToken } from "../cfg";
import {
CREATE_SANDBOX_URL,
GET_USER_URL,
verifyUserTokenUrl,
CREATE_UPLOAD_URL,
} from "./url";
// tslint:disable-next-line:no-var-requires
const DatauriParser = require("datauri/parser");
/**
 * Execute an HTTP request via axios and unwrap the API's `data.data`
 * envelope. On failure, surface the first API-provided error message
 * (when present) as the thrown error's message.
 */
const callApi = async (options: AxiosRequestConfig) => {
  try {
    const response = await axios(options);
    return response.data.data;
  } catch (e) {
    const apiErrors =
      e.response && e.response.data ? e.response.data.errors : undefined;
    if (apiErrors) {
      e.message = values(apiErrors)[0];
    }
    throw e;
  }
};
/**
 * Create a new sandbox through the CodeSandbox API using the stored auth
 * token. Throws when the user is not signed in.
 */
export async function uploadSandbox(sandbox: ISandbox) {
  const token = await getToken();

  if (token == null) {
    throw new Error("You're not signed in");
  }

  // The API expects snake_cased keys plus a marker that this came from the CLI.
  const sandboxData = Object.assign({}, decamelizeKeys(sandbox), {
    from_cli: true,
  });

  return callApi({
    data: { sandbox: sandboxData },
    headers: { Authorization: `Bearer ${token}` },
    method: "POST",
    url: CREATE_SANDBOX_URL,
  });
}
/**
 * Fetch the profile of the user that owns the given token.
 */
export async function fetchUser(token: string) {
  return callApi({
    headers: { Authorization: `Bearer ${token}` },
    method: "GET",
    url: GET_USER_URL,
  });
}
/**
 * Exchange a short-lived CLI auth token for a definitive token and user.
 */
export async function verifyUser(token: string) {
  return callApi({ method: "GET", url: verifyUserTokenUrl(token) });
}
/**
 * Upload a single binary file to the user's CodeSandbox upload storage.
 * The file content is sent as a data-URI. Throws when not signed in.
 */
export async function createUpload(filename: string, buffer: Buffer) {
  const token = await getToken();
  if (token == null) {
    throw new Error("You're not signed in");
  }

  // Encode the raw bytes as a data-URI, which is what the API expects.
  const parser = new DatauriParser();
  parser.format(filename, buffer);
  const content = parser.content;

  return callApi({
    data: { name: filename, content },
    headers: { Authorization: `Bearer ${token}` },
    method: "POST",
    url: CREATE_UPLOAD_URL,
  });
}
================================================
FILE: packages/cli/src/utils/confirm.ts
================================================
import * as inquirer from "inquirer";
/**
 * Ask the user a yes/no question and resolve with their answer.
 *
 * @param question the prompt to show
 * @param defaultNo when true, the default answer is "no"
 */
export default async function confirm(question: string, defaultNo = false) {
  const answers = await inquirer.prompt([
    {
      default: !defaultNo,
      message: question,
      name: "confirmed",
      type: "confirm",
    },
  ]);
  return answers.confirmed;
}
================================================
FILE: packages/cli/src/utils/env.ts
================================================
// True when the CLI targets the staging environment, selected by setting
// CODESANDBOX_NODE_ENV=development in the environment.
export const IS_STAGING = process.env.CODESANDBOX_NODE_ENV === "development";
================================================
FILE: packages/cli/src/utils/log.ts
================================================
import chalk from "chalk";
/**
 * Print a line prefixed with "> ", the CLI's standard output format.
 */
export function log(text = "") {
  console.log("> " + text);
}
/**
 * Print the CodeSandbox CLI banner and tagline.
 */
export function logCodeSandbox() {
  const banner = ` ${chalk.blue.bold("Code")}${chalk.yellow.bold(
    "Sandbox"
  )} ${chalk.bold("CLI")}`;
  console.log(banner);
  console.log(" The official CLI for uploading projects to CodeSandbox");
}
/**
 * Print the extra help section (notes and usage examples) shown by --help.
 */
export function extraHelp() {
  const lines = [
    "",
    " Notes:",
    "",
    " - You can only use the CLI if you are logged in",
    "",
    " Examples:",
    "",
    chalk.gray(" Deploy current directory:"),
    "",
    " $ codesandbox ./",
    "",
    chalk.gray(" Deploy custom directory:"),
    "",
    " $ codesandbox /usr/src/project",
    "",
  ];
  lines.forEach((line) => console.log(line));
}
// Log an informational message in blue.
export function info(text: string) {
  const message = chalk.blue(text);
  log(message);
}
// Log an error message in red, padded with blank lines above and below.
export function error(text: string) {
  const message = chalk.red(`[error] ${text}`);
  console.log();
  log(message);
  console.log();
}
// Log a warning message in yellow.
export function warn(text: string) {
  const message = chalk.yellow(`[warn] ${text}`);
  log(message);
}
// Log a success message in green.
export function success(text: string) {
  const message = chalk.green(`[success] ${text}`);
  log(message);
}
================================================
FILE: packages/cli/src/utils/parse-sandbox/file-error.ts
================================================
// Error raised while mapping sandbox files; carries the offending file's
// path and whether the failure was caused by the file being binary.
export default class FileError extends Error {
  // Path of the file (relative or absolute, as supplied by the caller).
  public path: string;
  // True when the error exists because the file is binary (too big or unuploadable).
  public isBinary: boolean;
  /**
   * Creates an instance of FileError.
   * @param {string} message
   * @param {string} path
   * @param {boolean} [isBinary=false] Whether the error was caused because the file is binary
   * @memberof FileError
   */
  constructor(message: string, path: string, isBinary = false) {
    super(message);
    this.path = path;
    this.isBinary = isBinary;
  }
}
================================================
FILE: packages/cli/src/utils/parse-sandbox/index.ts
================================================
import * as fs from "fs-extra";
import * as path from "path";
import { isText, isTooBig } from "codesandbox-import-utils/lib/is-text";
import { IModule, INormalizedModules } from "codesandbox-import-util-types";
import FileError from "./file-error";
// Binary files larger than this (5 MiB) are rejected instead of uploaded.
const MAX_FILE_SIZE = 5 * 1024 * 1024;
// Map of relative file path -> raw binary contents queued for upload.
export interface IUploads {
  [path: string]: Buffer;
}
async function normalizeFilesInDirectory(
p: string,
startingPath: string
): Promise<{
errors: FileError[];
uploads: IUploads;
files: INormalizedModules;
}> {
const entries = await fs.readdir(p);
const dirs: string[] = [];
const files: string[] = [];
const errors: FileError[] = [];
let uploads: IUploads = {};
await Promise.all(
entries.map(async (e) => {
const absolutePath = path.join(p, e);
const stat = await fs.stat(absolutePath);
if (stat.isDirectory()) {
if (e !== "node_modules" && e !== ".git") {
dirs.push(absolutePath);
}
} else {
files.push(absolutePath);
}
})
);
const recursiveDirs: { [path: string]: IModule } = (
await Promise.all(
dirs.map((d) => normalizeFilesInDirectory(d, startingPath))
)
).reduce((prev, next) => {
next.errors.forEach((e) => {
errors.push(e);
});
uploads = { ...next.uploads, ...uploads };
return { ...prev, ...next.files };
}, {});
const fileData = (
await Promise.all(
files.map(async (t) => {
const code = await fs.readFile(t);
const relativePath = t.replace(startingPath + "/", "");
const isBinary = !(await isText(t, code));
if (isBinary) {
if (code.byteLength > MAX_FILE_SIZE) {
errors.push(
new FileError(
isTooBig(code) ? "Is too big" : "Is a binary file",
relativePath,
true
)
);
return false;
} else {
uploads[relativePath] = code;
return false;
}
}
return { path: relativePath, code: code.toString() };
})
)
).reduce((prev, next) => {
if (next === false) {
return prev;
}
return {
...prev,
[next.path]: { content: next.code },
};
}, {});
return { errors, uploads, files: { ...recursiveDirs, ...fileData } };
}
/**
 * Whether the given path exists on disk — `fs.stat` succeeds for both files
 * and directories, and throws when the path is missing.
 */
const exists = async (p: string) => {
  try {
    // Only the success/failure of stat matters; its result is unused.
    await fs.stat(p);
    return true;
  } catch (e) {
    return false;
  }
};
/**
 * This will take a path and return all parameters that are relevant for the call
 * to the CodeSandbox API for creating a sandbox
 *
 * @export
 * @param {string} resolvedPath Absolute path of the project directory.
 * @throws When the given path does not exist.
 */
export default async function parseSandbox(resolvedPath: string) {
  const dirExists = await exists(resolvedPath);

  if (!dirExists) {
    throw new Error(`The given path (${resolvedPath}) doesn't exist.`);
  }

  // Walk the directory, separating text modules from binary uploads.
  const fileData = await normalizeFilesInDirectory(resolvedPath, resolvedPath);

  return fileData;
}
================================================
FILE: packages/cli/src/utils/parse-sandbox/upload-files.ts
================================================
import { IUploads } from ".";
import { createUpload } from "../api";
import { INormalizedModules } from "codesandbox-import-util-types";
/**
 * Uploads each binary buffer to the CodeSandbox upload endpoint and returns
 * the resulting modules keyed by path, where `content` is the hosted URL.
 *
 * NOTE(review): uploads run sequentially — confirm that is intentional
 * before parallelizing.
 */
export default async function uploadFiles(uploads: IUploads) {
  const files: INormalizedModules = {};

  for (const [uploadPath, buffer] of Object.entries(uploads)) {
    const res: { url: string } = await createUpload(uploadPath, buffer);

    files[uploadPath] = {
      content: res.url,
      isBinary: true,
    };
  }

  return files;
}
================================================
FILE: packages/cli/src/utils/url.ts
================================================
import { IS_STAGING } from "./env";
// Host root; staging builds target codesandbox.stream instead of .io.
export const BASE_URL = IS_STAGING
  ? "https://codesandbox.stream"
  : "https://codesandbox.io";

export const CREATE_SANDBOX_URL = `${BASE_URL}/api/v1/sandboxes`;
export const CREATE_UPLOAD_URL = `${BASE_URL}/api/v1/users/current_user/uploads`;
export const GET_USER_URL = `${BASE_URL}/api/v1/users/current`;
export const LOGIN_URL = `${BASE_URL}/cli/login`;

const VERIFY_USER_TOKEN_URL = `${BASE_URL}/api/v1/auth/verify/`;

/** URL that verifies the given CLI auth token. */
export const verifyUserTokenUrl = (token: string) =>
  `${VERIFY_USER_TOKEN_URL}${token}`;

/** Public editor URL of a sandbox. */
export const createSandboxUrl = (sandbox: { id: string }) =>
  `${BASE_URL}/s/${sandbox.id}`;
================================================
FILE: packages/cli/tsconfig.json
================================================
{
"compilerOptions": {
/* Basic Options */
"target": "es3" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,
"lib": [
"es2015",
"es6",
"dom"
] /* Specify library files to be included in the compilation: */,
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
"declaration": true /* Generates corresponding '.d.ts' file. */,
"sourceMap": true /* Generates corresponding '.map' file. */,
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./lib" /* Redirect output structure to the directory. */,
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true /* Enable all strict type-checking options. */,
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [] /* List of folders to include type definitions from. */
// "types": [] /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
/* Source Map Options */
// "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
"skipLibCheck": true,
"useUnknownInCatchVariables": false
},
"include": ["src/**/*.ts", "node_modules/**/*.ts", "typings/**/*.ts"],
"exclude": ["__tests__", "build", "**/*.test.ts", "temp"]
}
================================================
FILE: packages/cli/tslint.json
================================================
{
"defaultSeverity": "error",
"extends": ["tslint:latest", "tslint-config-prettier"],
"jsRules": {},
"rules": {
"no-console": [false],
"no-implicit-dependencies": false,
"no-submodule-imports": false,
"ordered-imports": false,
"object-literal-sort-keys": false
},
"rulesDirectory": []
}
================================================
FILE: packages/cli/typings/extensions/json.d.ts
================================================
declare module "*.json" {
  // `package` is a reserved word in strict mode; use a neutral identifier
  // for the ambient constant. The exported shape is unchanged.
  const pkg: {
    name: string;
    version: string;
  };
  export = pkg;
}
================================================
FILE: packages/git-extractor/.gitignore
================================================
config/production.json
================================================
FILE: packages/git-extractor/config/.gitkeep
================================================
================================================
FILE: packages/git-extractor/package.json
================================================
{
"name": "git-converter",
"version": "2.2.3",
"description": "",
"main": "index.js",
"private": true,
"scripts": {
"build": "rimraf dist && yarn compile",
"compile": "tsc",
"start": "node dist",
"dev": "nodemon --watch 'src/**/*.ts' --exec 'ts-node' src/index.ts",
"test": "jest",
"test:watch": "jest --watch"
},
"author": "",
"license": "ISC",
"devDependencies": {
"@types/acorn": "^4.0.2",
"@types/base-64": "^0.1.3",
"@types/debug": "^0.0.29",
"@types/humps": "^1.1.2",
"@types/jest": "^19.2.4",
"@types/jszip": "^3.4.1",
"@types/koa": "^2.0.39",
"@types/koa-bodyparser": "^3.0.25",
"@types/koa__router": "^8.0.3",
"@types/lodash.pickby": "^4.6.2",
"@types/lru-cache": "^4.1.0",
"@types/node": "^14",
"@types/node-fetch": "^1.6.7",
"@types/redis": "^2.6.0",
"@types/shortid": "^0.0.29",
"eslint": "^3.12.2",
"eslint-config-airbnb": "^13.0.0",
"eslint-plugin-import": "2.1.0",
"eslint-plugin-jsx-a11y": "2.2.3",
"eslint-plugin-react": "^6.8.0",
"nodemon": "^2.0.4",
"ts-node": "^8.10.1"
},
"dependencies": {
"@appsignal/nodejs": "^2.0.25",
"@babel/core": "^7.23.2",
"@babel/traverse": "^7.23.2",
"@koa/router": "^9.4.0",
"@sentry/node": "5.13.2",
"acorn": "^5.1.2",
"acorn-dynamic-import": "^2.0.2",
"acorn-jsx": "^4.0.1",
"acorn-object-spread": "LivelyKernel/acorn-object-spread",
"axios": "^1.6.0",
"babel-plugin-dynamic-import-node": "^1.1.0",
"babel-plugin-syntax-dynamic-import": "^6.18.0",
"babel-plugin-transform-async-to-generator": "^6.24.1",
"babel-plugin-transform-class-properties": "^6.24.1",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-object-rest-spread": "^6.26.0",
"babel-preset-env": "^1.6.0",
"babel-preset-react": "^6.24.1",
"babel-traverse": "^6.26.0",
"babylon": "^6.18.0",
"base-64": "^0.1.0",
"debug": "^2.6.8",
"envfile": "^7.1.0",
"gitignore-parser": "^0.0.2",
"humps": "CompuIves/humps",
"istextorbinary": "^6.0.0",
"jszip": "^3.5.0",
"koa": "^2.2.0",
"koa-bodyparser": "^4.2.0",
"lodash.pickby": "^4.6.0",
"lru-cache": "^4.1.1",
"node-fetch": "^2.6.7",
"redis": "^3.1.1",
"rimraf": "^2.6.1",
"shortid": "^2.2.8"
},
"jest": {
"transform": {
".(ts|tsx)": "<rootDir>../../node_modules/ts-jest/preprocessor.js"
},
"testEnvironment": "node",
"moduleFileExtensions": [
"ts",
"tsx",
"js",
"json"
],
"testPathIgnorePatterns": [
"<rootDir>/node_modules/",
"<rootDir>/dist/"
],
"testRegex": "(/__tests__/.*|\\.(test|spec))\\.(ts|tsx|js)$"
}
}
================================================
FILE: packages/git-extractor/src/index.ts
================================================
import * as Sentry from "@sentry/node";
import * as Koa from "koa";
import * as bodyParser from "koa-bodyparser";
import * as Router from "@koa/router";
import camelize from "./middleware/camelize";
import decamelize from "./middleware/decamelize";
import errorHandler from "./middleware/error-handler";
import appSignalMiddleware from "./middleware/appsignal";
// MIDDLEWARE
import logger from "./middleware/logger";
import notFound from "./middleware/not-found";
import * as define from "./routes/define";
// ROUTES
import * as github from "./routes/github";
import { appsignal } from "./utils/appsignal";
import log from "./utils/log";
// Error reporting: uncaught route errors end up in Sentry (see `app.on` below).
Sentry.init({
  dsn: "https://4917ce43c4ca42a1acb85b2843b79c6b@sentry.io/4377691",
});

const DEFAULT_PORT = process.env.PORT || 2000;

const app = new Koa();
const router = new Router();

// Middleware order matters: the error handler must wrap everything after it.
app.use(errorHandler);
app.use(logger);
app.use(bodyParser({ jsonLimit: "50mb" }));
app.use(camelize);
app.use(decamelize);
app.use(notFound);
app.use(appSignalMiddleware);

// Pull (read) routes first, then push (write) routes.
router
  .get(
    "/git/github/data/:username/:repo/:branch*/commit/:commitSha/path/:path*",
    github.data
  )
  .get("/git/github/rights/:username/:repo", github.getRights)
  .get("/git/github/info/:username/:repo/tree/:branch/:path*", github.info) // allow tree urls
  .get("/git/github/info/:username/:repo/blob/:branch/:path*", github.info) // allow blob urls
  .get("/git/github/info/:username/:repo/commit/:branch", github.info) // allow commit urls
  .get("/git/github/info/:username/:repo", github.info) // For when tree isn't in path (root path)
  .get("/git/github/info/:username/:repo/pull/:pull", github.pullInfo) // allow pull urls
  .post("/git/github/compare/:username/:repo", github.compare) // Compare changes between branches and commits
  // Push
  .post(
    "/git/github/commit/:username/:repo/:branch*/path/:path*",
    github.commit
  )
  .post("/git/github/pr/:username/:repo/:branch*/path/:path*", github.pr)
  .post("/git/github/repo/:username/:repo", github.repo)
  .post("/define", define.define);

app.use(router.routes()).use(router.allowedMethods());

log(`Listening on ${DEFAULT_PORT}`);
app.listen(DEFAULT_PORT);

console.log(
  JSON.stringify({
    message: `AppSignal ${appsignal.VERSION}, active: ${appsignal.isActive}`,
  })
);

// Forward errors emitted by Koa to the current AppSignal span and to Sentry,
// attaching the request context to the Sentry event.
app.on("error", (err, ctx) => {
  const span = appsignal.tracer().currentSpan();
  if (span) {
    span.addError(err);
  }
  Sentry.withScope(function (scope) {
    scope.addEventProcessor(function (event) {
      return Sentry.Handlers.parseRequest(event, ctx.request);
    });
    Sentry.captureException(err);
  });
});
================================================
FILE: packages/git-extractor/src/middleware/appsignal.ts
================================================
import { Context } from "koa";
import { appsignal } from "../utils/appsignal";
/**
 * Koa middleware that scopes each request inside the active AppSignal root
 * span and names the span after the matched route once the request finishes.
 */
const appSignal = async (ctx: Context, next: () => Promise<any>) => {
  const tracer = appsignal.tracer();
  const rootSpan = tracer.currentSpan();

  // Without an active root span there is nothing to instrument.
  if (!rootSpan) {
    return next();
  }

  tracer.wrapEmitter(ctx.req);
  tracer.wrapEmitter(ctx.res);

  // identifies the span in the stacked graphs
  rootSpan.setCategory("process_request.koa");

  return tracer.withSpan(rootSpan, async (span) => {
    try {
      await next();
    } finally {
      const { method, params = {}, query = {}, routerPath } = ctx;

      // set route params (if parsed by koa correctly)
      span.setSampleData("params", { ...params, ...query });

      if (routerPath) {
        span.setName(`${method} ${routerPath}`);
      }

      span.close();
    }
  });
};

export default appSignal;
================================================
FILE: packages/git-extractor/src/middleware/camelize.ts
================================================
import { Context } from "koa";
import { camelizeKeys } from "humps";
/**
 * Camelizes the keys of the parsed request body. The `files` object is kept
 * as-received because its keys are user-supplied paths that may legitimately
 * contain underscores.
 */
const camelizeMiddleware = async (ctx: Context, next: () => Promise<any>) => {
  const rawBody = ctx.request.body;

  if (rawBody) {
    const camelized = camelizeKeys(rawBody);

    // Restore the original (un-camelized) files map, if present.
    if (camelized.files) {
      camelized.files = rawBody.files;
    }

    ctx.request.body = camelized;
  }

  await next();
};

export default camelizeMiddleware;
================================================
FILE: packages/git-extractor/src/middleware/decamelize.ts
================================================
import { Context } from "koa";
import { decamelizeKeys } from "humps";
/** Decamelizes the response body keys after downstream middleware ran. */
const decamelizeMiddleware = async (ctx: Context, next: () => Promise<any>) => {
  await next();

  const body = ctx.body as object[];
  ctx.body = decamelizeKeys(body);
};

export default decamelizeMiddleware;
================================================
FILE: packages/git-extractor/src/middleware/error-handler.ts
================================================
import { Context } from "koa";
// Error handler
/**
 * Catches errors from downstream middleware, turns them into a JSON error
 * response and re-emits them on the app for reporting.
 */
const errorHandler = async (ctx: Context, next: () => Promise<any>) => {
  try {
    await next();
  } catch (e) {
    const upstream = e.response;

    // Prefer the error's own status, then the upstream HTTP status, then 500.
    ctx.status = e.status || (upstream && upstream.status) || 500;
    ctx.body = {
      error: upstream && upstream.data ? upstream.data.message : e.message,
    };

    if (upstream && upstream.data) {
      console.log("ERROR: " + upstream.data.message);
    }

    ctx.app.emit("error", e, ctx);
  }
};

export default errorHandler;
================================================
FILE: packages/git-extractor/src/middleware/logger.ts
================================================
import { Middleware, Context } from "koa";
// Fields of a single structured request-log line.
interface ILogParams {
  method: string;
  url: string;
  duration: number;
  error?: string;
}

/** Writes one structured (JSON) log line describing a request. */
function log({ method, url, duration, error }: ILogParams) {
  console.log(
    JSON.stringify({
      method,
      path: url,
      duration,
      error,
    })
  );
}
/**
 * Request-logging middleware: records method, url and duration for every
 * request; failed requests are logged with the error message and rethrown.
 */
const logger = async (ctx: Context, next: () => Promise<any>) => {
  const start = Date.now();

  try {
    await next();
  } catch (e) {
    log({
      method: ctx.method,
      duration: Date.now() - start,
      error: e.message,
      url: ctx.url,
    });
    throw e;
  }

  log({ method: ctx.method, duration: Date.now() - start, url: ctx.url });
};

export default logger;
================================================
FILE: packages/git-extractor/src/middleware/not-found.ts
================================================
import { Context } from "koa";
// Not found handler
/** Replaces Koa's default empty 404 body with a JSON error payload. */
const notFound = async (ctx: Context, next: () => Promise<any>) => {
  await next();

  if (ctx.status !== 404) {
    return;
  }

  ctx.body = { error: "Page not found" };
};

export default notFound;
================================================
FILE: packages/git-extractor/src/routes/define.test.ts
================================================
import { createSandboxFromDefine } from "./define";
// Builds the minimal package.json module fixture used by both tests.
const makePackageJsonModule = (path: string) => ({
  path,
  content: JSON.stringify({
    title: "test",
    description: "test description",
    dependencies: {},
  }),
  isBinary: false,
});

it("can infer title and description", async () => {
  const sandbox = await createSandboxFromDefine([
    makePackageJsonModule("package.json"),
  ]);

  expect(sandbox.title).toBe("test");
  expect(sandbox.description).toBe("test description");
});

it("works with leading slashes", async () => {
  const sandbox = await createSandboxFromDefine([
    makePackageJsonModule("/package.json"),
  ]);

  expect(sandbox.title).toBe("test");
  expect(sandbox.description).toBe("test description");
});
================================================
FILE: packages/git-extractor/src/routes/define.ts
================================================
import { Context } from "koa";
import createSandbox from "codesandbox-import-utils/lib/create-sandbox";
import {
INormalizedModules,
IModule,
ITemplate,
} from "codesandbox-import-util-types";
/**
 * Normalizes the incoming module list (strips leading slashes, stringifies
 * object content) and hands the result to the sandbox creator.
 *
 * @param files Modules posted to the /define endpoint, each with a path.
 */
export const createSandboxFromDefine = async (
  files: Array<IModule & { path: string }>
) => {
  const normalizedFiles: INormalizedModules = files
    .map((file) => {
      // Remove the leading slash so every path is stored relative.
      if (file.path[0] === "/") {
        file.path = file.path.slice(1);
      }

      // Content may arrive as a parsed object; store it as formatted JSON.
      if (typeof file.content === "object") {
        file.content = JSON.stringify(file.content, null, 2);
      }

      return file;
    })
    .reduce(
      (total: INormalizedModules, next) => ({
        ...total,
        [next.path]: next,
      }),
      {}
    );

  try {
    // Paths were normalized above, so package.json is keyed WITHOUT a
    // leading slash — the previous "/package.json" lookup could never match
    // and the debug log below never fired.
    const pkg = normalizedFiles["package.json"];

    if (pkg && pkg.type === "file") {
      const parsed = JSON.parse(pkg.content);
      console.log(
        `Creating defined sandbox with ${JSON.stringify(
          parsed.dependencies
        )} deps, ${JSON.stringify(parsed.devDependencies)} devDeps.`
      );
    }
  } catch (e) {
    /* nothing */
  }

  return createSandbox(normalizedFiles);
};
/**
 * POST /define handler: builds a sandbox from the posted files, applying the
 * optional template override.
 */
export const define = async (ctx: Context, _next: () => Promise<any>) => {
  const { files, template } = ctx.request.body;

  const sandbox = await createSandboxFromDefine(files);

  if (template) {
    sandbox.template = template as ITemplate;
  }

  ctx.body = { sandbox };
};
================================================
FILE: packages/git-extractor/src/routes/github/api.ts
================================================
import * as Sentry from "@sentry/node";
import axios, { AxiosPromise, AxiosRequestConfig } from "axios";
import * as zip from "jszip";
import * as LRU from "lru-cache";
import fetch from "node-fetch";
import { encode } from "base-64";
import { IGitInfo, ITree } from "./push";
import { appsignal } from "../../utils/appsignal";
const API_URL = "https://api.github.com";
const REPO_BASE_URL = API_URL + "/repos";
const GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID;
const GITHUB_CLIENT_SECRET = process.env.GITHUB_CLIENT_SECRET;
const NOT_FOUND_MESSAGE =
"Could not find the specified repository or directory";
/** `/repos/:username/:repo` */
function buildRepoApiUrl(username: string, repo: string) {
  return [REPO_BASE_URL, username, repo].join("/");
}

/** `/repos/:username/:repo/pulls/:pull` */
function buildPullApiUrl(username: string, repo: string, pull: number) {
  return `${buildRepoApiUrl(username, repo)}/pulls/${pull}`;
}

/** `/repos/:username/:repo/commits/:commitSha` */
function buildCommitApiUrl(username: string, repo: string, commitSha: string) {
  return `${buildRepoApiUrl(username, repo)}/commits/${commitSha}`;
}

/** `/repos/:username/:repo/git/trees/:treeSha` */
function buildTreesApiUrl(username: string, repo: string, treeSha: string) {
  return `${buildRepoApiUrl(username, repo)}/git/trees/${treeSha}`;
}

/** `/repos/:username/:repo/contents/:path` */
function buildContentsApiUrl(username: string, repo: string, path: string) {
  return `${buildRepoApiUrl(username, repo)}/contents/${path}`;
}
/**
 * Executes an axios request inside a fresh AppSignal child span and records
 * request metrics: a counter per request name, a counter for requests made
 * without a user token, and a binary-file counter for rate-limit checks.
 *
 * @param requestName   Human-readable name; also snake-cased into metric keys.
 * @param requestObject The axios request config to execute.
 */
function requestAxios(
  requestName: string,
  requestObject: AxiosRequestConfig
): AxiosPromise {
  const tracer = appsignal.tracer();
  const span = tracer.createSpan(undefined, tracer.currentSpan());

  // NOTE(review): the callback parameter shadows the outer `span`; both are
  // presumably the same span per withSpan's contract — confirm.
  return tracer.withSpan(span, (span) => {
    span.setCategory("request-api.github");
    span.setName(requestName);

    const meter = appsignal.metrics();
    const snakeCaseRequestName = requestName.toLowerCase().replace(/\s/g, "_");
    meter.incrementCounter(`github_request_${snakeCaseRequestName}`, 1);

    // To keep track of how many binary files we are actually trying to request SHAs for
    if (
      snakeCaseRequestName === "checking_remaining_rate_limit" &&
      requestObject?.params?.numberOfRequests
    ) {
      meter.incrementCounter(
        "number_of_binary_files",
        requestObject.params.numberOfRequests
      );
    }

    // `auth` is only set by createAxiosRequestConfig when no user token was
    // supplied (basic auth with client id/secret).
    if (requestObject.auth) {
      // In the case we're using not the user token, let's log that as well!
      meter.incrementCounter(
        `github_unauthorized_request_${snakeCaseRequestName}`,
        1
      );
    }

    // Close the span on success; attach the error and close it on failure.
    return axios(requestObject)
      .then((res) => {
        span.close();
        return res;
      })
      .catch((e) => {
        span.addError(e);
        span.close();
        return Promise.reject(e);
      });
  });
}
/** `/repos/:username/:repo/compare/:baseRef...:headRef` */
function buildCompareApiUrl(
  username: string,
  repo: string,
  baseRef: string,
  headRef: string
) {
  const refRange = `${baseRef}...${headRef}`;
  return `${buildRepoApiUrl(username, repo)}/compare/${refRange}`;
}
/**
 * Builds the axios config for a GitHub API call. With a user token we send a
 * Bearer Authorization header; without one we fall back to the app's
 * client-id/secret basic auth.
 */
function createAxiosRequestConfig(token?: string): AxiosRequestConfig {
  const Accept = "application/vnd.github.v3+json";

  if (token) {
    return {
      headers: { Accept, Authorization: `Bearer ${token}` },
    };
  }

  return {
    auth: {
      username: GITHUB_CLIENT_ID!,
      password: GITHUB_CLIENT_SECRET!,
    },
    headers: { Accept },
  };
}
/** `/repos/:username/:repo/contents/:path?ref=:branch` */
function buildContentsUrl(
  username: string,
  repo: string,
  branch: string,
  path: string
) {
  const base = buildRepoApiUrl(username, repo);
  return `${base}/contents/${path}?ref=${branch}`;
}

/** `/repos/:username/:repo/commits/:branch?path=:path` */
function buildCommitsUrl(
  username: string,
  repo: string,
  branch: string,
  path: string
) {
  const base = buildRepoApiUrl(username, repo);
  return `${base}/commits/${branch}?path=${path}`;
}

/** `/repos/:username/:repo/commits?sha=:branch&path=:path` */
function buildCommitsByPathUrl(
  username: string,
  repo: string,
  branch: string,
  path: string
) {
  const base = buildRepoApiUrl(username, repo);
  return `${base}/commits?sha=${branch}&path=${path}`;
}
// Subset of GitHub's repository response that this service consumes.
interface IRepoResponse {
  id: number;
  node_id: string;
  name: string;
  full_name: string;
  private: boolean;
  default_branch: string;
}

// Response of `GET /repos/:owner/:repo/compare/:base...:head`.
interface ICompareResponse {
  files: Array<{
    sha: string;
    filename: string;
    status: "added" | "deleted";
    additions: number;
    deletions: number;
    changes: number;
    contents_url: string;
    // Optional unified diff of the change.
    patch?: string;
  }>;
  base_commit: {
    sha: string;
  };
  merge_base_commit: {
    sha: string;
  };
  commits: Array<{ sha: string }>;
}

// File content as returned by the contents API.
interface IContentResponse {
  content: string;
  encoding: "base64" | "utf-8" | "binary";
  sha: string;
}

// Commit lookup response; only the tree SHA is consumed here.
interface ICommitResponse {
  commit: {
    tree: {
      sha: string;
    };
  };
}

// Normalized pull-request information returned by `createPr`.
interface IPrResponse {
  number: number;
  repo: string;
  username: string;
  branch: string;
  state: string;
  merged: boolean;
  mergeable: boolean;
  mergeable_state: string;
  commitSha: string;
  baseCommitSha: string;
  rebaseable: boolean;
  commits: number;
  additions: number;
  deletions: number;
  changed_files: number;
}

// Response when deleting content via the contents API.
interface IDeleteContentResponse {
  commit: {
    sha: string;
  };
}
/**
 * Fetches the GitHub comparison (changed files + commits) between two refs.
 */
export async function getComparison(
  username: string,
  repo: string,
  baseRef: string,
  headRef: string,
  token: string
) {
  const requestConfig = {
    url: encodeURI(buildCompareApiUrl(username, repo, baseRef, headRef)),
    ...createAxiosRequestConfig(token),
  };

  const response: { data: ICompareResponse } = await requestAxios(
    "Get Comparison",
    requestConfig
  );

  return response.data;
}
/**
 * Fetches a single content resource (e.g. a file blob) from the given GitHub
 * API url.
 */
export async function getContent(url: string, token: string) {
  const requestConfig = {
    url: encodeURI(url),
    ...createAxiosRequestConfig(token),
  };

  const response: { data: IContentResponse } = await requestAxios(
    "Get Content",
    requestConfig
  );

  return response.data;
}
// Cached `GET /repos/:owner/:repo` response together with the ETag it was
// served with, enabling conditional (If-None-Match) requests.
type RepoInfoCache = {
  etag: string;
  response: IRepoResponse;
};

const repoInfoCache = new LRU<string, RepoInfoCache>({
  max: 50 * 1024 * 1024, // 50 MB
  // NOTE(review): no `length` option is provided, so lru-cache counts every
  // entry as 1 — `max` effectively caps entry COUNT, not bytes. Confirm
  // whether a byte-based `length` function was intended.
});
/**
 * Fetches repository metadata, using an ETag-based conditional request so
 * repeated lookups are served from the in-memory cache on a 304 response.
 */
export async function getRepo(username: string, repo: string, token?: string) {
  const url = buildRepoApiUrl(username, repo);
  const cacheIdentifier = [username, repo, token].filter(Boolean).join("::");
  let etagCache: RepoInfoCache | undefined = repoInfoCache.get(cacheIdentifier);

  const config = {
    url: encodeURI(url),
    ...createAxiosRequestConfig(token),
  };

  if (etagCache) {
    // BUGFIX: the previous `config.headers = config.headers = {}` replaced
    // the headers object outright, dropping the Accept and Authorization
    // headers set by createAxiosRequestConfig. Preserve them and only add
    // the conditional-request header.
    config.headers = { ...config.headers, "If-None-Match": etagCache.etag };
    config.validateStatus = function (status: number) {
      // Axios sees 304 (Not Modified) as an error. We don't want that.
      return status < 400; // Reject only if the status code is greater than or equal to 400
    };
  }

  const response: {
    data: IRepoResponse;
    status: number;
    headers: any;
  } = await requestAxios("Get Repo", config);

  if (response.status === 304) {
    // Not modified: the cached copy is still valid.
    return etagCache!.response;
  }

  // Fresh response: remember it together with its ETag for next time.
  repoInfoCache.set(cacheIdentifier, {
    etag: response.headers.etag,
    response: response.data,
  });

  return response.data;
}
/**
 * Fetches the tree at `treeSha` and returns it with every path in
 * `deletedFiles` removed. Directory entries that contain a deleted file are
 * expanded in place: their children are inlined with full (prefixed) paths
 * so the deleted entries can be filtered out of the flat list.
 *
 * NOTE(review): the `path = []` parameter appears unused inside this
 * function — confirm it can be dropped from the signature.
 */
export async function getTreeWithDeletedFiles(
  username: string,
  repo: string,
  treeSha: string,
  deletedFiles: string[],
  token: string,
  path = []
) {
  // Fetches a single (non-recursive) tree by SHA.
  async function fetchTree(sha: string) {
    const url = buildTreesApiUrl(username, repo, sha);
    const response: { data: ITreeResponse } = await requestAxios("Get Tree", {
      url: encodeURI(url),
      ...createAxiosRequestConfig(token),
    });
    return response.data.tree;
  }

  let tree = await fetchTree(treeSha);

  // Process deleted files one at a time, sequentially, via promise chaining.
  return deletedFiles.reduce(
    (aggr, file) =>
      aggr.then(async (tree) => {
        const parts = file.split("/");
        parts.pop();

        // Ancestor directory paths, e.g. "a/b/c.txt" -> ["a", "a/b"].
        const dirs = parts.reduce<string[]>((aggr, part, index) => {
          return aggr.concat(
            aggr[index - 1] ? aggr[index - 1] + "/" + part : part
          );
        }, []);

        // Expand each ancestor directory entry into its children (with full
        // paths) and remove the directory entry itself from the tree.
        const newTree = await dirs.reduce(
          (subaggr, dir) =>
            subaggr.then(async (tree) => {
              const treeIndex = tree.findIndex(
                (item) => item.type === "tree" && item.path === dir
              );

              if (treeIndex >= 0) {
                const nestedTree = await fetchTree(tree[treeIndex].sha);
                const newTree = tree.concat(
                  nestedTree.map((item) => ({
                    ...item,
                    path: dir + "/" + item.path,
                  }))
                );
                newTree.splice(treeIndex, 1);
                return newTree;
              }

              return tree;
            }),
          Promise.resolve(tree)
        );

        // Finally drop the deleted file itself.
        return newTree.filter((item) => item.path !== file);
      }),
    Promise.resolve(tree)
  );
}
/**
 * Resolves the tree SHA that the given commit points at.
 */
export async function getCommitTreeSha(
  username: string,
  repo: string,
  commitSha: string,
  token: string
) {
  const requestConfig = {
    url: encodeURI(buildCommitApiUrl(username, repo, commitSha)),
    ...createAxiosRequestConfig(token),
  };

  const response: { data: ICommitResponse } = await requestAxios(
    "Get CommitTreeSha",
    requestConfig
  );

  return response.data.commit.tree.sha;
}
/**
 * Returns the SHA of the most recent commit that touched `path` on `branch`,
 * or undefined when no such commit exists.
 */
export async function getLatestCommitShaOfFile(
  username: string,
  repo: string,
  branch: string,
  path: string,
  token?: string
): Promise<string | undefined> {
  const url = buildCommitsByPathUrl(username, repo, branch, path);

  const response: { data: { sha: string }[] } = await requestAxios(
    "Get Commits of File",
    {
      url: encodeURI(url),
      ...createAxiosRequestConfig(token),
    }
  );

  const [latest] = response.data;
  return latest ? latest.sha : undefined;
}
/** Whether the given repository is marked private on GitHub. */
export async function isRepoPrivate(
  username: string,
  repo: string,
  token: string
) {
  const { private: isPrivate } = await getRepo(username, repo, token);
  return isPrivate;
}
// Permission flags returned alongside the repository when authenticated.
interface RightsResponse {
  permissions: {
    admin: boolean;
    push: boolean;
    pull: boolean;
  };
}

/**
 * Fetch the permissions of a user on a specific repository.
 *
 * Returns "none" when the request is unauthorized/forbidden or when the
 * response carries no permission info (e.g. anonymous access).
 */
export async function fetchRights(
  username: string,
  repo: string,
  token?: string
): Promise<"admin" | "write" | "read" | "none"> {
  try {
    const response: { data: RightsResponse } = await requestAxios(
      "Get Rights",
      {
        url: encodeURI(buildRepoApiUrl(username, repo)),
        ...createAxiosRequestConfig(token),
      }
    );

    const { permissions } = response.data;

    // No token
    if (!permissions) {
      return "none";
    }

    if (permissions.admin) {
      return "admin";
    }

    return permissions.push ? "write" : "read";
  } catch (e) {
    const status = e.response && e.response.status;

    if (status === 403 || status === 401) {
      return "none";
    }

    throw e;
  }
}
// Response of `GET /repos/:owner/:repo/git/trees/:sha`.
interface ITreeResponse {
  sha: string;
  tree: ITree;
  // True when GitHub truncated the tree in the response.
  truncated: boolean;
  url: string;
}

// Response of `POST /repos/:owner/:repo/git/blobs`.
interface IBlobResponse {
  url: string;
  sha: string;
}
/**
 * Opens a pull request from `head` into `base` and returns a normalized
 * summary of the created PR.
 */
export async function createPr(
  base: {
    username: string;
    repo: string;
    branch: string;
  },
  head: {
    username: string;
    repo: string;
    branch: string;
  },
  title: string,
  body: string,
  token: string
): Promise<IPrResponse> {
  // Cross-repo PRs need the head branch prefixed with its owner.
  const headRef = `${
    base.username === head.username ? "" : head.username + ":"
  }${head.branch}`;

  const { data } = await requestAxios("Create PR", {
    method: "post",
    url: encodeURI(`${buildRepoApiUrl(base.username, base.repo)}/pulls`),
    data: {
      base: base.branch,
      head: headRef,
      title,
      body,
      maintainer_can_modify: true,
    },
    ...createAxiosRequestConfig(token),
  });

  return {
    number: data.number,
    repo: data.head.repo.name,
    username: data.head.repo.owner.login,
    commitSha: data.head.sha,
    branch: data.head.ref,
    merged: data.merged,
    state: data.state,
    mergeable: data.mergeable,
    mergeable_state: data.mergeable_state,
    rebaseable: data.rebaseable,
    additions: data.additions,
    changed_files: data.changed_files,
    commits: data.commits,
    baseCommitSha: data.base.sha,
    deletions: data.deletions,
  };
}
/**
 * Creates a git blob with the given content/encoding; returns its SHA + url.
 */
export async function createBlob(
  username: string,
  repo: string,
  content: string,
  encoding: "utf-8" | "base64",
  token: string
) {
  const endpoint = `${buildRepoApiUrl(username, repo)}/git/blobs`;

  const response: { data: IBlobResponse } = await requestAxios("Create Blob", {
    method: "post",
    url: encodeURI(endpoint),
    data: { content: content, encoding },
    ...createAxiosRequestConfig(token),
  });

  return response.data;
}
// Response of `POST /repos/:owner/:repo/git/trees`.
interface ICreateTreeResponse {
  sha: string;
  url: string;
  tree: ITree;
}

/**
 * Creates a git tree, optionally layered on top of `baseTreeSha`.
 */
export async function createTree(
  username: string,
  repo: string,
  tree: ITree,
  baseTreeSha: string | null,
  token: string
) {
  const endpoint = `${buildRepoApiUrl(username, repo)}/git/trees`;

  const response: { data: ICreateTreeResponse } = await requestAxios(
    "Create Tree",
    {
      method: "post",
      url: encodeURI(endpoint),
      data: { base_tree: baseTreeSha, tree },
      ...createAxiosRequestConfig(token),
    }
  );

  return response.data;
}
// Response of `POST /repos/:owner/:repo/git/commits`.
interface ICreateCommitResponse {
  sha: string;
  url: string;
  author: {
    date: string;
    name: string;
    email: string;
  };
  committer: {
    date: string;
    name: string;
    email: string;
  };
  message: string;
}

/**
 * Create a commit from the given tree
 */
export async function createCommit(
  username: string,
  repo: string,
  treeSha: string,
  parentCommitShas: string[],
  message: string,
  token: string
) {
  const endpoint = `${buildRepoApiUrl(username, repo)}/git/commits`;

  const response: { data: ICreateCommitResponse } = await requestAxios(
    "Create Commit",
    {
      method: "post",
      url: encodeURI(endpoint),
      data: { tree: treeSha, message, parents: parentCommitShas },
      ...createAxiosRequestConfig(token),
    }
  );

  return response.data;
}
// Response of `PATCH /repos/:owner/:repo/git/refs/heads/:branch`.
interface IUpdateReferenceResponse {
  ref: string;
  url: string;
}

/**
 * Force-updates a branch reference to point at the given commit.
 */
export async function updateReference(
  username: string,
  repo: string,
  branch: string,
  commitSha: string,
  token: string
) {
  const endpoint = `${buildRepoApiUrl(
    username,
    repo
  )}/git/refs/heads/${branch}`;

  const response: { data: IUpdateReferenceResponse } = await requestAxios(
    "Update Reference",
    {
      method: "patch",
      url: encodeURI(endpoint),
      data: { sha: commitSha, force: true },
      ...createAxiosRequestConfig(token),
    }
  );

  return response.data;
}
interface ICreateReferenceResponse {
  ref: string;
  url: string;
  object: {
    type: string;
    sha: string;
    url: string;
  };
}

/**
 * Create a new branch reference (`refs/heads/<branch>`) pointing at `refSha`.
 */
export async function createReference(
  username: string,
  repo: string,
  branch: string,
  refSha: string,
  token: string
) {
  const payload = { ref: `refs/heads/${branch}`, sha: refSha };

  const response: {
    data: ICreateReferenceResponse;
  } = await requestAxios("Create Reference", {
    method: "post",
    url: encodeURI(`${buildRepoApiUrl(username, repo)}/git/refs`),
    data: payload,
    ...createAxiosRequestConfig(token),
  });

  return response.data;
}
interface ICreateForkResponse {
  name: string;
  full_name: string;
  description: string;
  private: boolean;
  fork: boolean;
}

/**
 * Ask GitHub to fork `username/repo` into the authenticated user's account.
 * Note that forking happens asynchronously on GitHub's side.
 */
export async function createFork(
  username: string,
  repo: string,
  token: string
) {
  const forksUrl = encodeURI(`${buildRepoApiUrl(username, repo)}/forks`);

  const response: { data: ICreateForkResponse } = await requestAxios(
    "Create Fork",
    {
      method: "post",
      url: forksUrl,
      data: {},
      ...createAxiosRequestConfig(token),
    }
  );

  return response.data;
}
interface ICreateRepoResponse {
  name: string;
  full_name: string;
  description: string;
  private: false;
  fork: false;
  url: string;
  default_branch: string;
}

/**
 * Look up the default branch name of a repository.
 */
export async function getDefaultBranch(
  username: string,
  repo: string,
  token?: string
) {
  const repoData = await getRepo(username, repo, token);
  return repoData.default_branch;
}
export async function createRepo(
username: string,
repo: string,
token: string,
privateRepo: boolean = false
) {
const repoExists = await doesRepoExist(username, repo, token);
if (repoExists) {
const error = new Error(
`The repository ${username}/${repo} already exists.`
);
// @ts-ignore
error.status = 422;
throw error;
}
const response: { data: ICreateRepoResponse } = await requestAxios(
"Create Repo",
{
method: "post",
url: encodeURI(`${API_URL}/user/repos`),
data: {
name: repo,
description: "Created with CodeSandbox",
homepage: `https://codesandbox.io/p/github/${username}/${repo}`,
auto_init: true,
private: privateRepo,
},
...createAxiosRequestConfig(token),
}
);
return response.data;
}
/**
 * Check if repository exists
 *
 * A 404 from the API means "does not exist"; any other failure is re-thrown.
 */
export async function doesRepoExist(
  username: string,
  repo: string,
  userToken?: string
) {
  try {
    await requestAxios("Repo Exists", {
      method: "get",
      url: encodeURI(buildRepoApiUrl(username, repo)),
      ...createAxiosRequestConfig(userToken),
    });
  } catch (e) {
    if (e.response && e.response.status === 404) {
      return false;
    }
    throw e;
  }

  return true;
}
// Normalized result of resolving a repo location to a concrete commit.
interface CommitResponse {
  commitSha: string;
  username: string;
  repo: string;
  branch: string;
  path: string;
}

// Short-lived cache of the latest commit sha keyed by
// username + repo + branch + path, so bursts of requests don't each hit the
// GitHub API.
const shaCache = new LRU({
  max: 500,
  maxAge: 1000 * 5, // 5 seconds
});

// Long-lived ETag cache (same key) used for conditional requests; a 304
// response lets us reuse the cached sha without spending rate limit.
const etagCache = new LRU<string, { etag: string; sha: string }>({
  max: 50000,
});

/**
 * Drop the cached commit sha for the given repo location so the next
 * fetchRepoInfo call goes to the GitHub API again.
 */
export function resetShaCache(gitInfo: IGitInfo) {
  const { username, repo, branch, path = "" } = gitInfo;

  return shaCache.del(username + repo + branch + path);
}
/**
 * Resolve the latest commit sha for a username/repo/branch/path combination.
 *
 * Results are cached briefly in `shaCache`. For anonymous requests we also do
 * ETag-based conditional requests so 304 responses don't consume rate limit.
 * Because a branch name may itself contain slashes (indistinguishable from
 * the path in the route), a 404/422 triggers a retry with the first path
 * segment moved onto the branch name.
 *
 * @param skipCache Force a fresh API request even if a sha is cached
 * @throws The underlying request error (message rewritten to
 *         NOT_FOUND_MESSAGE when no branch/path combination matched)
 */
export async function fetchRepoInfo(
  username: string,
  repo: string,
  branch: string,
  path: string = "",
  skipCache: boolean = false,
  userToken?: string
): Promise<CommitResponse> {
  let span;
  try {
    const cacheId = username + repo + branch + path;
    // We cache the latest retrieved sha for a limited time, so we don't spam the
    // GitHub API for every request
    let latestSha = shaCache.get(cacheId) as string;

    if (!latestSha || skipCache) {
      // Trace the outgoing GitHub call in AppSignal.
      const tracer = appsignal.tracer();
      span = tracer.createSpan(undefined, tracer.currentSpan());
      span.setCategory("request-api.github");
      span.setName("GET api.github.com/info");

      const url = buildCommitsUrl(username, repo, branch, path);

      const headers: { "If-None-Match"?: string } = {};
      const etagCacheResponse = etagCache.get(cacheId);
      if (etagCacheResponse) {
        // Use an ETag header so duplicate requests don't count towards the limit
        headers["If-None-Match"] = etagCacheResponse.etag;
      }

      const defaultConfig = createAxiosRequestConfig(userToken);
      const response = await requestAxios("Get Repo Info", {
        url: encodeURI(url),
        validateStatus: function (status) {
          // Axios sees 304 (Not Modified) as an error. We don't want that.
          return status < 400; // Reject only if the status code is greater than or equal to 400
        },
        ...defaultConfig,
        headers: {
          ...defaultConfig.headers,
          ...headers,
        },
      });

      span.setSampleData("custom_data", {
        etagCacheUsed: response.status === 304 && etagCacheResponse,
      });

      const meter = appsignal.metrics();
      if (response.status === 304 && etagCacheResponse) {
        // Not modified: the sha we cached alongside the ETag is still current.
        meter.incrementCounter("github_cache_hit", 1);
        latestSha = etagCacheResponse.sha;
      } else {
        meter.incrementCounter("github_cache_miss", 1);
        latestSha = response.data.sha;
        const etag = response.headers.etag;

        // Only save towards the cache if there is no userToken. For people with a userToken
        // we have 12k requests per hour to use. Won't hit that ever.
        if (etag && !userToken) {
          etagCache.set(cacheId, {
            etag,
            sha: response.data.sha,
          });
        }
      }

      shaCache.set(cacheId, latestSha);
    }

    return {
      commitSha: latestSha,
      username,
      repo,
      branch,
      path,
    };
  } catch (e) {
    // There is a chance that the branch contains slashes, we try to fix this
    // by requesting again with the first part of the path appended to the branch
    // when a request fails (404)
    if (
      e.response &&
      (e.response.status === 404 || e.response.status === 422)
    ) {
      const [branchAddition, ...newPath] = path.split("/");
      const newBranch = `${branch}/${branchAddition}`;

      if (branchAddition !== "") {
        return await fetchRepoInfo(
          username,
          repo,
          newBranch,
          newPath.join("/"),
          false,
          userToken
        );
      }

      e.message = NOT_FOUND_MESSAGE;
    }

    // An anonymous 403 is counted as a rate-limit hit in metrics.
    if (e.response && e.response.status === 403 && userToken == null) {
      const meter = appsignal.metrics();
      meter.incrementCounter("github_rate_limit", 1);
    }

    Sentry.captureException(e);

    throw e;
  } finally {
    if (span) {
      span.close();
    }
  }
}
/**
 * Fetch pull-request metadata.
 *
 * @returns Normalized PR info: `repo`/`username`/`branch`/`commitSha` refer to
 *          the PR head, `baseCommitSha` to the base.
 * @throws Error("Could not find pull request information") when the request fails
 */
export async function fetchPullInfo(
  username: string,
  repo: string,
  pull: number,
  userToken?: string
): Promise<IPrResponse> {
  const url = buildPullApiUrl(username, repo, pull);
  try {
    const response = await requestAxios("Get Pull Info", {
      url: encodeURI(url),
      ...createAxiosRequestConfig(userToken),
    });
    const data = response.data;
    return {
      // Bug fix: the PR number is a top-level field of the API response;
      // `data.head.number` does not exist and was always undefined.
      number: data.number,
      repo: data.head.repo.name,
      username: data.head.repo.owner.login,
      commitSha: data.head.sha,
      branch: data.head.ref,
      state: data.state,
      merged: data.merged,
      mergeable: data.mergeable,
      mergeable_state: data.mergeable_state,
      rebaseable: data.rebaseable,
      additions: data.additions,
      changed_files: data.changed_files,
      commits: data.commits,
      baseCommitSha: data.base.sha,
      deletions: data.deletions,
    };
  } catch (e) {
    e.message = "Could not find pull request information";
    throw e;
  }
}
// Zipballs reporting a Content-Length above this are rejected up front.
const MAX_ZIP_SIZE = 128 * 1024 * 1024; // 128Mb

/**
 * Download the zipball of a repo at a specific commit and return it as a
 * loaded JSZip archive.
 *
 * Authenticates with the user's token when provided, otherwise with the
 * application's client id/secret (basic auth).
 *
 * @throws Error("This repo is too big to import") when Content-Length exceeds MAX_ZIP_SIZE
 * @throws Error carrying GitHub's status code when the download request fails
 */
export async function downloadZip(
  gitInfo: IGitInfo,
  commitSha: string,
  userToken?: string
) {
  const repoUrl = buildRepoApiUrl(gitInfo.username, gitInfo.repo);
  const url = encodeURI(`${repoUrl}/zipball/${commitSha}`);
  const Accept = "application/vnd.github.v3+json";
  const buffer: Buffer = await fetch(url, {
    headers: {
      Authorization: userToken
        ? `Bearer ${userToken}`
        : `Basic ${encode(`${GITHUB_CLIENT_ID}:${GITHUB_CLIENT_SECRET}`)}`,
      Accept,
    },
  }).then((res) => {
    if (Number(res.headers.get("Content-Length")) > MAX_ZIP_SIZE) {
      throw new Error("This repo is too big to import");
    }

    if (!res.ok) {
      return res.text().then((text) => {
        const error = new Error(
          `Could not import repo from GitHub, error from GitHub. Status code: ${res.status}, error: ${text}`
        );
        // Forward the error status from GitHub, eg. if GH returns 404 we return that as well.
        // This is handled in error-handler.ts middleware.
        // @ts-ignore
        error.status = res.status;
        throw error;
      });
    } else {
      return res.buffer();
    }
  });

  const loadedZip = await zip.loadAsync(buffer);

  return loadedZip;
}
/**
 * Check whether GitHub's core rate limit still has more than
 * `numberOfRequests` requests remaining.
 *
 * @returns true when strictly more requests remain than we intend to make
 */
export async function checkRemainingRateLimit(
  numberOfRequests: number
): Promise<boolean> {
  const response: {
    data: { resources: { core: { remaining: number } } };
  } = await requestAxios("Checking Remaining Rate Limit", {
    url: encodeURI("https://api.github.com/rate_limit"),
    params: {
      numberOfRequests: numberOfRequests,
    },
  });

  const remaining = response.data
    ? response.data.resources.core.remaining
    : 0;

  return numberOfRequests < remaining;
}
================================================
FILE: packages/git-extractor/src/routes/github/index.ts
================================================
import * as Sentry from "@sentry/node";
import { IModule, INormalizedModules } from "codesandbox-import-util-types";
import createSandbox from "codesandbox-import-utils/lib/create-sandbox";
import { Context } from "koa";
import * as api from "./api";
import { getComparison } from "./api";
import { downloadRepository } from "./pull/download";
import * as push from "./push";
import { IChanges, IGitInfo } from "./push";
// Extract the user's token from the Authorization header, stripping an
// optional "Bearer " prefix; plain tokens are passed through unchanged.
const getUserToken = (ctx: Context) => {
  const header = ctx.header.authorization;

  if (!header) {
    return undefined;
  }

  return header.startsWith("Bearer ") ? header.replace("Bearer ", "") : header;
};
/**
 * Route: return commit info for the requested repo path, falling back to the
 * repository's default branch when the URL doesn't name one.
 */
export const info = async (ctx: Context, next: () => Promise<any>) => {
  const userToken = getUserToken(ctx);
  const { username, repo, path } = ctx.params;

  const branch =
    ctx.params.branch ||
    (await api.getDefaultBranch(username, repo, userToken));

  ctx.body = await api.fetchRepoInfo(
    username,
    repo,
    branch,
    path,
    false,
    userToken
  );
};
// We receive paths as "/src/index.js" and root path as "src", and Git takes
// "src/index.js", so we need to ensure we produce the correct paths
const changesWithRootPath = (changes: IChanges, rootPath = ""): IChanges => {
  const convertPath = (path: string) =>
    rootPath ? rootPath + path : path.substr(1);

  const withConvertedPath = <T extends { path: string }>(change: T): T => ({
    ...change,
    path: convertPath(change.path),
  });

  return {
    added: changes.added.map(withConvertedPath),
    deleted: changes.deleted.map(convertPath),
    modified: changes.modified.map(withConvertedPath),
  };
};
export const pullInfo = async (ctx: Context, next: () => Promise<any>) => {
const userToken = getUserToken(ctx);
ctx.body = await api.fetchPullInfo(
ctx.params.username,
ctx.params.repo,
ctx.params.pull,
userToken
);
};
export const getRights = async (ctx: Context) => {
const userToken = getUserToken(ctx);
const rights = await api.fetchRights(
ctx.params.username,
ctx.params.repo,
userToken
);
ctx.body = {
permission: rights,
};
};
/**
 * This route will take a github path and return sandbox data for it
 *
 * Data contains all files, directories and package.json info
 */
export const data = async (ctx: Context, next: () => Promise<any>) => {
  try {
    // We get branch, etc from here because there could be slashes in a branch name,
    // we can retrieve if this is the case from this method
    let { username, repo, branch, commitSha } = ctx.params;
    const userToken = getUserToken(ctx);

    Sentry.setContext("repo", {
      username,
      repo,
      branch,
      commitSha,
    });

    // "+" in the route segment represents a space in the actual path.
    const path = ctx.params.path && ctx.params.path.replace("+", " ");

    // Fallback sandbox title: "user/repo" or "user/repo: lastPathSegment".
    let title = `${username}/${repo}`;
    if (path) {
      const splittedPath = path.split("/");
      title = title + `: ${splittedPath[splittedPath.length - 1]}`;
    }

    let isPrivate = false;
    if (userToken) {
      isPrivate = await api.isRepoPrivate(username, repo, userToken);
    }

    if (!branch) {
      branch = await api.getDefaultBranch(username, repo, userToken);
    }

    const downloadedFiles = await downloadRepository(
      {
        username,
        repo,
        branch,
        path,
      },
      commitSha,
      isPrivate,
      userToken
    );

    // Drop any cached sha for this location when the repo is private.
    // NOTE(review): presumably to avoid serving cached data across auth
    // boundaries — confirm against resetShaCache's callers.
    if (isPrivate) {
      api.resetShaCache({ branch, username, repo, path });
    }

    console.log(
      `Creating sandbox for ${username}/${repo}, branch: ${branch}, path: ${path}`
    );

    const sandboxParams = await createSandbox(downloadedFiles);

    const finalTitle = sandboxParams.title || title;

    ctx.body = {
      ...sandboxParams,
      // If no title is set in package.json, go for this one
      title: finalTitle,
      // Privacy 2 is private, privacy 0 is public
      privacy: isPrivate ? 2 : 0,
    };
  } catch (e) {
    // Here we catch our false, preemptive rate limit and give it a proper error status code for the server.
    if (
      e.message == "Can't make axios requests, not enough rate limit remaining"
    ) {
      ctx.body = {
        error: "Can't make axios requests, not enough rate limit remaining",
      };
      ctx.status = 403;
    } else {
      throw e;
    }
  }
};
/*
  Compares two refs on the repo
*/
export const compare = async (ctx: Context) => {
  const { baseRef, headRef, token, includeContents } = ctx.request.body;
  const { username, repo } = ctx.params;

  const comparison = await getComparison(
    username,
    repo,
    baseRef,
    headRef,
    token
  );

  // The head of the comparison is the *last* commit in the list; fall back to
  // the merge base when there are no commits. (Bug fix: the
  // `includeContents: false` branch previously used `commits[0]` — the oldest
  // commit — which was inconsistent with the `includeContents: true` branch.)
  const headCommitSha = comparison.commits.length
    ? comparison.commits[comparison.commits.length - 1].sha
    : comparison.merge_base_commit.sha;

  if (includeContents) {
    // Fetch the contents of every changed file so the client can apply the
    // diff without further round-trips.
    const files = await Promise.all(
      comparison.files.map(
        ({
          additions,
          changes,
          contents_url,
          deletions,
          filename,
          status,
          patch,
          sha,
        }) => {
          return api.getContent(contents_url, token).then((content) => {
            const data = content.content;
            const buffer = Buffer.from(data, content.encoding);

            let stringContent: string;
            // If patch it is a text file, if not it is a binary
            if (patch) {
              stringContent = buffer.toString("utf-8");
            } else {
              // When we include binary files, we include them as base64. This will allow a "merge commit", related to
              // a PR being out of sync with its source branch (ex. "master"), to add binary files
              stringContent = buffer.toString("base64");
            }

            return {
              additions,
              changes,
              deletions,
              filename,
              status,
              content: stringContent,
              isBinary: !patch,
            };
          });
        }
      )
    );

    ctx.body = {
      files,
      baseCommitSha: comparison.base_commit.sha,
      headCommitSha,
    };
  } else {
    ctx.body = {
      files: comparison.files.map(
        ({ additions, status, filename, deletions, changes }) => ({
          additions,
          status,
          filename,
          deletions,
          changes,
        })
      ),
      baseCommitSha: comparison.base_commit.sha,
      headCommitSha,
    };
  }
};
/**
 * Route: open a pull request from sandbox changes. When the user lacks write
 * access to the target repo we fork it first and open the PR from the fork.
 */
export const pr = async (ctx: Context) => {
  const {
    changes,
    title,
    description,
    commitSha,
    currentUser,
    token,
    sandboxId,
  }: {
    changes: IChanges;
    title: string;
    description: string;
    commitSha: string;
    currentUser: string;
    token: string;
    sandboxId: string;
  } = ctx.request.body;
  const { username, repo, branch, path } = ctx.params;

  let gitInfo: IGitInfo = { username, repo, branch, path };

  const rights = await api.fetchRights(username, repo, token);
  if (rights === "none" || rights === "read") {
    // Ah, we need to fork...
    gitInfo = await push.createFork(gitInfo, currentUser, token);
  }

  const commit = await push.createInitialCommit(
    gitInfo,
    changesWithRootPath(changes, path),
    [commitSha],
    token
  );

  const res = await push.createBranch(
    gitInfo,
    commit.sha,
    token,
    `csb-${sandboxId}`
  );

  // Base is the original repo/branch; head may live on a fork.
  const base = { branch, repo, username };
  const head = {
    branch: res.branchName,
    repo: gitInfo.repo,
    username: gitInfo.username,
  };

  ctx.body = await api.createPr(base, head, title, description, token);
};
/**
 * Route: commit sandbox changes onto an existing branch and move the branch
 * ref to the new commit.
 */
export const commit = async (ctx: Context) => {
  const { parentCommitShas, changes, message, token } = ctx.request.body;
  const { username, repo, branch, path } = ctx.params;

  const gitInfo: IGitInfo = { username, repo, branch, path };

  const newCommit = await push.createCommit(
    gitInfo,
    changesWithRootPath(changes, path),
    parentCommitShas,
    message,
    token
  );

  await api.updateReference(username, repo, branch, newCommit.sha, token);

  ctx.body = newCommit;
};
/**
 * Route: export a sandbox to a brand new GitHub repository.
 *
 * Body: user token, flat list of sandbox files (each with its path), and an
 * optional privacy flag.
 *
 * @throws Error("Repo name cannot be empty") when no repo name is given
 */
export const repo = async (ctx: Context, next: () => Promise<any>) => {
  const {
    token,
    normalizedFiles: fileArray,
    privateRepo,
  }: {
    token: string;
    normalizedFiles: Array<IModule & { path: string }>;
    privateRepo?: boolean;
  } = ctx.request.body;
  const { username, repo } = ctx.params;

  // Validate before doing any work.
  if (!repo) {
    throw new Error("Repo name cannot be empty");
  }

  // Index the file list by path. A plain loop replaces the previous
  // spread-inside-reduce, which rebuilt the object on every file (O(n²)).
  const normalizedFiles: INormalizedModules = {};
  for (const file of fileArray) {
    normalizedFiles[file.path] = file;
  }

  const result = await push.createRepo(
    username,
    repo,
    normalizedFiles,
    token,
    privateRepo
  );

  ctx.body = result;
};
================================================
FILE: packages/git-extractor/src/routes/github/pull/download.ts
================================================
import * as JSZip from "jszip";
import { isText } from "codesandbox-import-utils/lib/is-text";
import { INormalizedModules } from "codesandbox-import-util-types";
import { IGitInfo } from "../push/index";
import {
downloadZip,
getLatestCommitShaOfFile,
checkRemainingRateLimit,
} from "../api";
// A GitHub zipball wraps everything in one top-level folder; derive that
// folder's name (with trailing slash) from the archive's first entry.
const getFolderName = (zip: JSZip) => {
  const firstEntry = Object.keys(zip.files)[0];
  return `${firstEntry.split("/")[0]}/`;
};
/**
 * We use https://rawgit.com/ as urls, since they change the content-type corresponding
 * to the file. Github always uses text/plain
 *
 * Builds a githack CDN URL for a file, pinned to a commit sha when given,
 * otherwise to the branch.
 */
export const rawGitUrl = (
  gitInfo: IGitInfo,
  filePath: string,
  commitSha?: string
) => {
  const ref = commitSha || gitInfo.branch;
  const folder = gitInfo.path ? gitInfo.path + "/" : "";

  return (
    `https://rawcdn.githack.com/${gitInfo.username}/${gitInfo.repo}/${ref}/` +
    folder +
    filePath
  );
};
/**
 * Download a repository at a commit and normalize it into sandbox modules.
 *
 * Text files are inlined as strings. Binary files are inlined as base64 for
 * private repos; for public repos they become githack CDN URLs pinned to the
 * file's latest commit sha instead.
 *
 * @throws When the anonymous rate limit can't cover the per-file sha lookups
 */
export async function downloadRepository(
  gitInfo: IGitInfo,
  commitSha: string,
  isPrivate: boolean,
  userToken?: string
): Promise<INormalizedModules> {
  const zip = await downloadZip(gitInfo, commitSha, userToken);
  let folderName = getFolderName(zip);

  // Only extract entries under the requested subfolder, if any.
  if (gitInfo.path) {
    folderName += gitInfo.path + "/";
  }

  const result: INormalizedModules = {};
  const pathArray: string[] = [];

  // First process non-binary files, and save paths of binary files to request
  await Promise.all(
    Object.keys(zip.files).map(async (path) => {
      if (path.startsWith(folderName)) {
        const relativePath = path.replace(folderName, "");

        const file = zip.files[path];
        if (!file.dir) {
          const bufferContents = await file.async("nodebuffer");
          const text = await isText(file.name, bufferContents);

          if (!text) {
            if (isPrivate) {
              // Private repo: a public CDN URL wouldn't be accessible, so
              // inline the binary as base64.
              result[relativePath] = {
                binaryContent: bufferContents.toString("base64"),
                content: "",
                isBinary: true,
              };
            } else {
              pathArray.push(relativePath);
            }
          } else {
            const contents = await file.async("text");
            result[relativePath] = {
              content: contents || "",
              isBinary: false,
            };
          }
        }
      }
    })
  );

  const requestsToMake = pathArray.length;

  /**
   * Check if there is enough of our CodeSandbox Github token rate limit left to be able to
   * request all the files we need to. If there isn't, then we shouldn't make the Promise.all
   * request because when the first 403 rate limit comes through, it rejects everything, and
   * it wastes even more rate limit tries.
   */
  if (!userToken) {
    const canRequest = await checkRemainingRateLimit(requestsToMake);
    if (!canRequest) {
      throw new Error(
        "Can't make axios requests, not enough rate limit remaining"
      );
    }
  }

  // Then we can request the SHAs of binary files if there is enough rate limit left.
  await Promise.all(
    pathArray.map(async (relativePath) => {
      const fileSha = await getLatestCommitShaOfFile(
        gitInfo.username,
        gitInfo.repo,
        gitInfo.branch,
        relativePath,
        userToken
      );
      result[relativePath] = {
        content: rawGitUrl(gitInfo, relativePath, fileSha),
        isBinary: true,
      };
    })
  );

  return result;
}
================================================
FILE: packages/git-extractor/src/routes/github/push/index.ts
================================================
import {
IBinaryModule,
IModule,
INormalizedModules,
} from "codesandbox-import-util-types";
import delay from "../../../utils/delay";
import * as api from "../api";
import { createBlobs } from "./utils/create-blobs";
// A location in a GitHub repository, optionally scoped to a subfolder.
export interface IGitInfo {
  username: string;
  repo: string;
  branch: string;
  path?: string;
}

// One entry of a git tree (the shape used by the GitHub trees API).
export interface ITreeFile {
  path: string;
  mode: string;
  type: string;
  size: number;
  sha: string;
  url: string;
}

// File changes to be committed. Added/modified entries carry their content
// inline (text or base64); deletions are just paths.
export interface IChanges {
  added: Array<{
    path: string;
    content: string;
    encoding: "base64" | "utf-8";
  }>;
  deleted: string[];
  modified: Array<{
    path: string;
    content: string;
    encoding: "base64" | "utf-8";
  }>;
}

export type ITree = ITreeFile[];
// Derive a branch name from the current timestamp; unique enough for the
// short-lived "csb-" branches we create.
function generateBranchName() {
  return `csb-${Date.now()}`;
}
/**
 * Create a new branch pointing at the given sha. Uses a timestamp-based
 * "csb-" name when no branch name is supplied.
 *
 * @returns The reference url/ref from GitHub plus the branch name used
 */
export async function createBranch(
  gitInfo: IGitInfo,
  refSha: string,
  userToken: string,
  branchName: string = generateBranchName()
) {
  const reference = await api.createReference(
    gitInfo.username,
    gitInfo.repo,
    branchName,
    refSha,
    userToken
  );

  return { url: reference.url, ref: reference.ref, branchName };
}
/**
 * Fork the repo into `currentUser`'s account (no-op when the user already has
 * a repo with that name).
 *
 * Forking happens asynchronously on GitHub's side, so we poll once per second
 * until the fork shows up, for at most ~5 minutes.
 *
 * @returns The git info pointing at the (possibly pre-existing) fork
 * @throws When the fork doesn't appear within the polling window
 */
export async function createFork(
  gitInfo: IGitInfo,
  currentUser: string,
  userToken: string
): Promise<IGitInfo> {
  const forkGitInfo: IGitInfo = { ...gitInfo, username: currentUser };

  const existingRepo = await api.doesRepoExist(
    forkGitInfo.username,
    forkGitInfo.repo
  );

  if (!existingRepo) {
    await api.createFork(gitInfo.username, gitInfo.repo, userToken);

    // Forking is asynchronous, so we need to poll for when the repo has been created.
    // (Fix: the delay now sits *between* polls, so we no longer sleep an extra
    // second after the fork has already been detected.)
    let tryCount = 0;
    while (
      !(await api.doesRepoExist(forkGitInfo.username, forkGitInfo.repo))
    ) {
      tryCount++;
      if (tryCount > 300) {
        throw new Error(
          "Forking repo takes longer than 5 minutes, try again later."
        );
      }
      await delay(1000);
    }
  }

  return forkGitInfo;
}
/**
 * Create the first commit for a set of changes — a regular commit with a
 * canned "initial commit" message.
 */
export async function createInitialCommit(
  gitInfo: IGitInfo,
  changes: IChanges,
  parentShas: string[],
  userToken: string
) {
  return createCommit(
    gitInfo,
    changes,
    parentShas,
    "initial commit",
    userToken
  );
}
/**
 * Build a git commit from sandbox changes on top of the given parent commits.
 *
 * With no changes at all, the commit simply reuses the first parent's tree.
 * With deletions we fetch the full tree minus the deleted entries and send it
 * without a base tree; otherwise new blobs are layered onto the parent tree.
 *
 * @returns The created commit (GitHub create-commit response)
 */
export async function createCommit(
  gitInfo: IGitInfo,
  changes: IChanges,
  parentShas: string[],
  message: string,
  userToken: string
) {
  const { username, repo } = gitInfo;

  // Tree of the first parent; also used as base_tree when nothing is deleted.
  let treeSha = await api.getCommitTreeSha(
    username,
    repo,
    parentShas[0],
    userToken
  );

  let tree: ITree = [];
  if (
    changes.added.length ||
    changes.deleted.length ||
    changes.modified.length
  ) {
    if (changes.deleted.length) {
      // Fetch the parent tree with the deleted entries filtered out, so the
      // commit's tree can be rebuilt from scratch without them.
      tree = await api.getTreeWithDeletedFiles(
        username,
        repo,
        treeSha,
        changes.deleted,
        userToken
      );
    }

    // Upload the contents of added/modified files as blobs.
    const createdBlobs = await createBlobs(
      [...changes.modified, ...changes.added],
      gitInfo,
      userToken
    );

    const updatedTree = tree.concat(createdBlobs);

    // base_tree is null when there are deletions: the rebuilt tree is sent
    // standalone instead of being layered on the parent tree.
    const treeResponse = await api.createTree(
      username,
      repo,
      updatedTree,
      changes.deleted.length ? null : treeSha,
      userToken
    );

    treeSha = treeResponse.sha;
  }

  return await api.createCommit(
    gitInfo.username,
    gitInfo.repo,
    treeSha,
    parentShas,
    message,
    userToken
  );
}
/**
 * Create a new GitHub repository and push all sandbox files to it as the
 * initial commit on top of the auto-init commit.
 *
 * @returns The git info (username/repo/branch/path) of the created repo
 */
export async function createRepo(
  username: string,
  name: string,
  sandboxFiles: INormalizedModules,
  userToken: string,
  privateRepo?: boolean
) {
  await api.createRepo(username, name, userToken, privateRepo);

  // NOTE(review): assumes the auto-initialized repo's default branch is
  // "main" — confirm this holds for accounts configured with a different
  // default branch name.
  const latestData = await api.fetchRepoInfo(
    username,
    name,
    "main",
    "",
    true,
    userToken
  );

  const gitInfo: IGitInfo = {
    username: latestData.username,
    repo: latestData.repo,
    branch: latestData.branch,
    path: latestData.path,
  };

  // Every sandbox file (directories excluded) becomes an "added" change.
  const changes: IChanges = {
    added: Object.keys(sandboxFiles)
      .filter((path) => sandboxFiles[path].type !== "directory")
      .map((path) => {
        if ("binaryContent" in sandboxFiles[path]) {
          const file = sandboxFiles[path] as IBinaryModule;
          return {
            content: file.binaryContent,
            encoding: "base64",
            path,
          };
        }
        const file = sandboxFiles[path] as IModule;
        return {
          content: file.content,
          encoding: file.isBinary ? "base64" : "utf-8",
          path,
        };
      }),
    deleted: [],
    modified: [],
  };

  const commit = await createCommit(
    gitInfo,
    changes,
    [latestData.commitSha],
    "Initial commit",
    userToken
  );

  // Point the branch at the new commit. (The response was previously bound to
  // an unused `res` variable; it is not needed.)
  await api.updateReference(
    username,
    gitInfo.repo,
    gitInfo.branch,
    commit.sha,
    userToken
  );

  api.resetShaCache(gitInfo);

  return gitInfo;
}
================================================
FILE: packages/git-extractor/src/routes/github/push/utils/__tests__/delta.test.ts
================================================
import { INormalizedModules } from "codesandbox-import-util-types";
import getDelta from "../delta";
// Unit tests for getDelta: diffing a git tree against sandbox modules.
describe("commit", () => {
  describe("delta", () => {
    // Minimal git tree with two blobs, mirroring the GitHub trees API shape.
    const SAMPLE_TREE = [
      {
        path: "src/App.css",
        mode: "100644",
        type: "blob",
        sha: "15adfdc710ca89d2c427dcbb6716943e1029c73a",
        size: 341,
        url:
          "https://api.github.com/repos/CompuIves/codesandbox-test-git-app/git/blobs/15adfdc710ca89d2c427dcbb6716943e1029c73a",
      },
      {
        path: "src/App.js",
        mode: "100644",
        type: "blob",
        sha: "d7d52a7f38a321668d4fa83409a7c47d1bfccd7c",
        size: 496,
        url:
          "https://api.github.com/repos/CompuIves/codesandbox-test-git-app/git/blobs/d7d52a7f38a321668d4fa83409a7c47d1bfccd7c",
      },
    ];
    // Module contents whose git blob shas match SAMPLE_TREE exactly.
    const SAMPLE_MODULES: INormalizedModules = {
      "src/App.css": {
        content: `.App {
text-align: center;
}
.App-logo {
animation: App-logo-spin infinite 20s linear;
height: 80px;
}
.App-header {
background-color: #222;
height: 150px;
padding: 20px;
color: white;
}
.App-intro {
font-size: large;
}
@keyframes App-logo-spin {
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
}
`,
        isBinary: false,
      },
      "src/App.js": {
        content: `import React, { Component } from 'react';
import logo from './logo.svg';
import './App.css';
class App extends Component {
render() {
return (
<div className="App">
<div className="App-header">
<img src={logo} className="App-logo" alt="logo" />
<h2>Welcome to React</h2>
</div>
<p className="App-intro">
To get started, edit <code>src/App.js</code> and save to reload.
</p>
</div>
);
}
}
export default App;
`,
        isBinary: false,
      },
    };
    // Identical shas on both sides → empty delta.
    it("detects no change", () => {
      expect(getDelta(SAMPLE_TREE, SAMPLE_MODULES)).toEqual({
        added: [],
        deleted: [],
        modified: [],
      });
    });
    // A module path absent from the tree is reported as added.
    it("detects added files", () => {
      const newModules = {
        ...SAMPLE_MODULES,
        "test.js": { content: "Hey", isBinary: false },
      };
      expect(getDelta(SAMPLE_TREE, newModules)).toEqual({
        added: ["test.js"],
        deleted: [],
        modified: [],
      });
    });
    // Changed content → differing blob sha → reported as modified.
    it("detects modified files", () => {
      const newModules = {
        ...SAMPLE_MODULES,
        "src/App.js": { content: "Hey", isBinary: false },
      };
      expect(getDelta(SAMPLE_TREE, newModules)).toEqual({
        added: [],
        deleted: [],
        modified: ["src/App.js"],
      });
    });
    // A null module marks a deletion.
    it("detects deleted files", () => {
      const newModules = {
        ...SAMPLE_MODULES,
        "src/App.js": null,
      };
      expect(getDelta(SAMPLE_TREE, newModules)).toEqual({
        added: [],
        deleted: ["src/App.js"],
        modified: [],
      });
    });
  });
});
================================================
FILE: packages/git-extractor/src/routes/github/push/utils/create-blobs.ts
================================================
import { IModule, INormalizedModules } from "codesandbox-import-util-types";
import fetch from "node-fetch";
import { createBlob } from "../../api";
import { IGitInfo, ITree } from "../index";
/**
 * Resolve a module to its uploadable string content. Text modules already
 * carry their content inline; for binary modules `content` holds a URL that
 * is downloaded and re-encoded as base64.
 */
async function downloadContent(module: IModule): Promise<string> {
  if (module.isBinary) {
    const response = await fetch(module.content);
    const buffer = await response.buffer();
    return buffer.toString("base64");
  }

  return module.content;
}
/**
 * Upload every file as a git blob and return the matching tree entries.
 *
 * All uploads run concurrently; the resulting entries use the regular-file
 * blob mode.
 */
export async function createBlobs(
  files: Array<{ path: string; content: string; encoding: "base64" | "utf-8" }>,
  gitInfo: IGitInfo,
  token: string
): Promise<ITree> {
  return Promise.all(
    files.map(async (file) => {
      const blob = await createBlob(
        gitInfo.username,
        gitInfo.repo,
        file.content,
        file.encoding,
        token
      );

      return {
        path: file.path,
        sha: blob.sha,
        size: file.content.length,
        mode: "100644", // blob
        type: "blob",
        url: blob.url,
      };
    })
  );
}
================================================
FILE: packages/git-extractor/src/routes/github/push/utils/delta.ts
================================================
import { IModule, IDirectory } from "codesandbox-import-util-types";
import { ITree } from "../index";
import { createHash } from "crypto";
/**
 * Compute the git blob sha of `content`: the SHA-1 of
 * "blob <byte length>\0<content>", matching `git hash-object` and the shas
 * GitHub returns in trees.
 */
function getGitSha(content: string) {
  const hash = createHash("sha1");
  // Buffer.byteLength gives the UTF-8 byte count (same value the deprecated
  // `new Buffer(content).length` produced, without the deprecation warning).
  hash.update("blob " + Buffer.byteLength(content) + "\0" + content);
  return hash.digest("hex");
}
// Module map where a null value marks a file removed from the sandbox.
interface INormalizedAndDeletedModules {
  [path: string]: IModule | IDirectory | null;
}

/**
 * Diff a git tree against the sandbox's modules, returning the paths that
 * were added, modified and deleted. Directories and binary modules are never
 * reported as modified, since their shas can't be compared.
 */
export default function getDelta(
  tree: ITree,
  modules: INormalizedAndDeletedModules
) {
  const added: string[] = [];
  const modified: string[] = [];
  const deleted: string[] = [];

  const treePaths = new Set(tree.map((file) => file.path));

  for (const file of tree) {
    const module = modules[file.path];

    if (!module) {
      deleted.push(file.path);
      continue;
    }

    if (module.type === "directory" || module.isBinary) {
      continue;
    }

    if (getGitSha(module.content) !== file.sha) {
      modified.push(file.path);
    }
  }

  for (const path of Object.keys(modules)) {
    if (!treePaths.has(path)) {
      added.push(path);
    }
  }

  return { added, modified, deleted };
}
================================================
FILE: packages/git-extractor/src/routes/github/types.d.ts
================================================
// Shapes of objects returned by GitHub's repository-contents style endpoints.

// A single file or directory entry.
export type Module = {
  name: string;
  path: string;
  sha: string;
  size: number;
  url: string;
  html_url: string;
  git_url: string;
  download_url: string;
  type: "file" | "dir";
};

// A directory with its files and subdirectories resolved recursively.
export type NormalizedDirectory = {
  path: string;
  name: string;
  files: Array<Module>;
  directories: Array<NormalizedDirectory>;
};

// A Module whose contents have been downloaded.
export type DownloadedFile = Module & {
  code: string;
  isBinary: boolean;
};
================================================
FILE: packages/git-extractor/src/utils/appsignal.ts
================================================
import { Appsignal } from "@appsignal/nodejs";

// Shared AppSignal APM client; only actively reporting when NODE_ENV is
// "production".
export const appsignal = new Appsignal({
  active: process.env.NODE_ENV === "production",
  name: "Importers",
  environment: String(process.env.ENVIRONMENT),
});
================================================
FILE: packages/git-extractor/src/utils/delay.ts
================================================
/**
 * Resolve after `ms` milliseconds; useful for polling loops.
 */
export default function delay(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
================================================
FILE: packages/git-extractor/src/utils/env.ts
================================================
// Normalized runtime environment: anything other than an explicit
// "production" NODE_ENV is treated as "development".
export default process.env.NODE_ENV === "production"
  ? "production"
  : "development";
================================================
FILE: packages/git-extractor/src/utils/log.ts
================================================
import * as _debug from "debug";

// Turn on all codesandbox debug namespaces when developing locally.
if (process.env.NODE_ENV === "development") {
  _debug.enable("cs:*");
}

const debug = _debug("cs:git-extractor");

// Emit a message under the "cs:git-extractor" debug namespace.
export default function log(message: string) {
  debug(message);
}
================================================
FILE: packages/git-extractor/tsconfig.json
================================================
{
"compilerOptions": {
/* Basic Options */
"target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,
"lib": [
"es2015",
"dom"
] /* Specify library files to be included in the compilation: */,
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
"sourceMap": true /* Generates corresponding '.map' file. */,
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./dist" /* Redirect output structure to the directory. */,
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true /* Enable all strict type-checking options. */,
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [] /* List of folders to include type definitions from. */
// "types": [] /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
/* Source Map Options */
// "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
"skipLibCheck": true,
"useUnknownInCatchVariables": false
},
"include": ["src/**/*.ts", "typings/**/*.ts"],
"exclude": ["node_modules", "__tests__", "build", "**/*.test.ts", "temp"]
}
================================================
FILE: packages/hmaeo.yml
================================================
heahea
================================================
FILE: packages/import-utils/.gitignore
================================================
*.js
*.js.map
lib
================================================
FILE: packages/import-utils/LICENSE
================================================
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2018 CodeSandbox BV. <https://codesandbox.io/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
================================================
FILE: packages/import-utils/package.json
================================================
{
"name": "codesandbox-import-utils",
"version": "2.2.3",
"main": "lib/index.js",
"files": [
"lib/**"
],
"scripts": {
"build": "rimraf lib && tsc -p tsconfig.json",
"test": "jest",
"test:watch": "jest --watch",
"prepublish": "yarn build"
},
"devDependencies": {
"rimraf": "^2.6.2"
},
"dependencies": {
"codesandbox-import-util-types": "^2.2.3",
"istextorbinary": "^6.0.0",
"lz-string": "^1.4.4"
},
"jest": {
"transform": {
".(ts|tsx)": "<rootDir>../../node_modules/ts-jest/preprocessor.js"
},
"testEnvironment": "node",
"moduleFileExtensions": [
"ts",
"tsx",
"js",
"json"
],
"testPathIgnorePatterns": [
"<rootDir>/node_modules/",
"<rootDir>/dist/",
"<rootDir>/lib/"
],
"testRegex": "(/__tests__/.*|\\.(test|spec))\\.(ts|tsx|js)$"
},
"gitHead": "3cdcdea389d39f2a92be73dcb73496f68c8ada41"
}
================================================
FILE: packages/import-utils/src/api/define.ts
================================================
import { ITemplate } from "codesandbox-import-util-types";
import * as LZString from "lz-string";
/**
 * Map of sandbox file path -> file payload sent to the define API.
 */
export interface IFiles {
  [key: string]: {
    // Raw file contents (base64-ish text expected for binary files —
    // NOTE(review): encoding for isBinary=true is not shown here; confirm with callers).
    content: string;
    // Whether the content should be treated as binary data.
    isBinary: boolean;
  };
}
/**
 * Compresses a string via LZ-based base64 and rewrites it into a
 * URL-safe form: '+' and '/' are swapped for '-' and '_', and the
 * base64 '=' padding is stripped.
 */
function compress(input: string) {
  const base64 = LZString.compressToBase64(input);
  return base64
    .replace(/\+/g, `-`) // '+' is not URL-safe
    .replace(/\//g, `_`) // '/' is not URL-safe
    .replace(/=+$/, ``); // drop trailing base64 padding
}
/**
 * Serializes the given files (and optional template) into a compressed,
 * URL-safe string suitable for the CodeSandbox define API.
 */
export function getParameters(parameters: {
  files: IFiles;
  template?: ITemplate;
}) {
  const serialized = JSON.stringify(parameters);
  return compress(serialized);
}
================================================
FILE: packages/import-utils/src/create-sandbox/__mocks__/pacote.ts
================================================
// Jest manual mock for `pacote`: every manifest lookup resolves to a
// fixed version so tests never touch the npm registry.
export const manifest = () => ({ version: "15.5.4" });
================================================
FILE: packages/import-utils/src/create-sandbox/__tests__/__snapshots__/html-parser.test.ts.snap
================================================
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`html-parser can retrieve body from html 1`] = `
{
"body": "
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run \`npm start\` in this folder.
To create a production bundle, use \`npm run build\`.
-->
",
"externalResources": [],
}
`;
exports[`html-parser can retrieve css external resources 1`] = `
{
"body": "
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run \`npm start\` in this folder.
To create a production bundle, use \`npm run build\`.
-->
",
"externalResources": [
"https://redux-form.com/6.8.0/bundle.css",
"//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css",
],
}
`;
exports[`html-parser can retrieve js external resources 1`] = `
{
"body": "
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run \`npm start\` in this folder.
To create a production bundle, use \`npm run build\`.
-->
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.slim.min.js"></script>
",
"externalResources": [
"https://redux-form.com/6.8.0/bundle.css",
"//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css",
"https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.slim.min.js",
],
}
`;
================================================
FILE: packages/import-utils/src/create-sandbox/__tests__/html-parser.test.ts
================================================
import parser from "../html-parser";
describe("html-parser", () => {
  // NOTE(review): these tests are snapshot-based, so the HTML fixtures
  // below (including their whitespace) must stay byte-for-byte stable.
  it("can retrieve body from html", () => {
    // Plain document: no <link>/<script> resources, only a body to extract.
    const BODY_HTML = `
    <!doctype html>
    <html lang="en">
      <head>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <title>Redux Shopping Cart Example</title>
      </head>
      <body>
        <div id="root"></div>
        <!--
          This HTML file is a template.
          If you open it directly in the browser, you will see an empty page.
          You can add webfonts, meta tags, or analytics to this file.
          The build step will place the bundled scripts into the <body> tag.
          To begin the development, run \`npm start\` in this folder.
          To create a production bundle, use \`npm run build\`.
        -->
      </body>
    </html>
    `;
    expect(parser(BODY_HTML)).toMatchSnapshot();
  });
  it("can retrieve js external resources", () => {
    // Document with two stylesheet links in <head> and a script in <body>;
    // all three should end up in externalResources.
    const BODY_HTML = `
    <!doctype html>
    <html lang="en">
      <head>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <title>Redux Shopping Cart Example</title>
        <link href="https://redux-form.com/6.8.0/bundle.css"
          media="screen, projection"
          rel="stylesheet" type="text/css"/>
        <link href="//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css"
          media="screen, projection" rel="stylesheet" type="text/css"/>
      </head>
      <body>
        <div id="root"></div>
        <!--
          This HTML file is a template.
          If you open it directly in the browser, you will see an empty page.
          You can add webfonts, meta tags, or analytics to this file.
          The build step will place the bundled scripts into the <body> tag.
          To begin the development, run \`npm start\` in this folder.
          To create a production bundle, use \`npm run build\`.
        -->
        <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.slim.min.js"></script>
      </body>
    </html>
    `;
    expect(parser(BODY_HTML)).toMatchSnapshot();
  });
  it("can retrieve css external resources", () => {
    // Document with only the two stylesheet links — no script resource.
    const BODY_HTML = `
    <!doctype html>
    <html lang="en">
      <head>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <title>Redux Shopping Cart Example</title>
        <link href="https://redux-form.com/6.8.0/bundle.css"
          media="screen, projection"
          rel="stylesheet" type="text/css"/>
        <link href="//cdnjs.cloudflare.com/ajax/libs/font-awesome/4.3.0/css/font-awesome.min.css"
          media="screen, projection" rel="stylesheet" type="text/css"/>
      </head>
      <body>
        <div id="root"></div>
        <!--
          This HTML file is a template.
          If you open it directly in the browser, you will see an empty page.
          You can add webfonts, meta tags, or analytics to this file.
          The build step will place the bundled scripts into the <body> tag.
          To begin the development, run \`npm start\` in this folder.
          To create a production bundle, use \`npm run build\`.
        -->
      </body>
    </html>
    `;
    expect(parser(BODY_HTML)).toMatchSnapshot();
  });
});
================================================
FILE: packages/import-utils/src/create-sandbox/__tests__/templates.test.ts
================================================
import { getTemplate } from "../templates";
describe("template detection", () => {
it("detects a react template", () => {
expect(
getTemplate(
{
dependencies: {},
devDependencies: {
"react-scripts": "latest",
},
},
{}
)
).toEqual("create-react-app");
});
it("detects a react template from forked create-react-app", () => {
expect(
getTemplate(
{
dependencies: {},
devDependencies: {
"@fork/react-scripts": "latest",
},
},
{}
)
).toEqual("create-react-app");
});
it("detects a nuxt template", () => {
expect(
getTemplate(
{
dependencies: {},
devDependencies: {
nuxt: "latest",
},
},
{}
)
).toEqual("nuxt");
});
it("detects a nuxt template when using nuxt3", () => {
expect(
getTemplate(
{
dependencies: {},
devDependencies: {
nuxt3: "latest",
},
},
{}
)
).toEqual("nuxt");
});
it("detects an apollo template", () => {
expect(
getTemplate(
{
dependencies: {},
devDependencies: {
"apollo-server": "latest",
},
},
{}
)
).toEqual("apollo");
});
});
================================================
FILE: packages/import-utils/src/create-sandbox/html-parser.ts
================================================
/**
 * A resource URL counts as external when it is absolute (http/https) or
 * protocol-relative (starts with "//"). Relative paths are rejected.
 */
function isValidResource(resource: string) {
  const externalPrefixes = ["https://", "http://", "//"];
  return externalPrefixes.some((prefix) => resource.startsWith(prefix));
}
/**
 * Extracts an external stylesheet URL from a single line of HTML.
 *
 * @param {string} line line to check
 * @returns {(string | undefined)} the href when it references an
 * external .css file, otherwise undefined
 */
function getCssResource(line: string): string | undefined {
  const match = line.match(/<link[^]*href="(.*\.css)"/);
  if (!match || !match[1]) {
    return undefined;
  }
  return isValidResource(match[1]) ? match[1] : undefined;
}
/**
 * Extracts an external script URL from a single line of HTML.
 *
 * @param {string} line line to check
 * @returns {(string | undefined)} the src when it references an
 * external resource, otherwise undefined
 */
function getJsResource(line: string): string | undefined {
  const match = line.match(/<script[^]*src="(.*)"/);
  if (!match || !match[1]) {
    return undefined;
  }
  return isValidResource(match[1]) ? match[1] : undefined;
}
/**
 * Returns an array of strings to external resources, we deliberately don't check
 * for javascript, since this is often added to the body. The body will be copied over
 *
 * @param {string} html
 * @returns {string[]} URLs of external css/js resources found per line
 */
function getExternalResources(html: string): string[] {
  return (
    html
      .split("\n")
      .map((line) => getCssResource(line) || getJsResource(line))
      // Type-predicate filter so the result is string[] rather than
      // (string | undefined)[] — the original filter did not narrow.
      .filter((x): x is string => Boolean(x))
  );
}
/**
 * Get all information in the body.
 *
 * Matches the content between the opening and closing body tags. The
 * opening tag may carry attributes (e.g. `<body class="app">`) — the
 * original regex only matched a bare `<body>`, silently dropping the
 * body for documents with attributes.
 *
 * @param {string} html
 * @returns {(string | undefined)} the body content, or undefined when
 * no body element is present
 */
function getBodyContent(html: string): string | undefined {
  const bodyRegex = /<body[^>]*>([^]*)<\/body>/;
  const match = html.match(bodyRegex);
  if (match) {
    return match[1];
  }
}
/**
* Parses the html for external resources and body
*
* @export
* @param {string} html
*/
export default function parseHTML(html: string) {
const externalResources = getExternalResources(html);
const bodyContent = getBodyContent(html);
return {
body: bodyContent || '<div id="root"></div>',
externalResources,
};
}
================================================
FILE: packages/import-utils/src/create-sandbox/index.ts
================================================
import {
INormalizedModules,
IModule,
ISandbox,
ITemplate,
} from "codesandbox-import-util-types";
import denormalize from "../utils/files/denormalize";
import { parse as parseEnv } from "envfile";
import parseHTML from "./html-parser";
import { getMainFile, getTemplate } from "./templates";
interface IDependencies {
[name: string]: string;
}
/**
 * Collects the external resources referenced by an HTML module.
 * Returns an empty resource list and a null file when no HTML module is
 * present.
 * NOTE(review): not referenced by createSandbox in this file — possibly
 * used elsewhere or dead; confirm before removing.
 */
function getHTMLInfo(html: IModule | undefined) {
  if (html) {
    const parsed = parseHTML(html.content);
    return { externalResources: parsed.externalResources, file: html };
  }
  return { externalResources: [], file: null };
}
/**
 * Picks the sandbox entry file. Preference order: the package.json
 * "main" field, the template's conventional main file, then the common
 * src/index.js and index.js locations. When none of those exist in the
 * directory, falls back to the main field (or the template default when
 * the main field is empty).
 */
function findMainFile(
  directory: INormalizedModules,
  mainFile: string,
  template: ITemplate
) {
  const templateMain = getMainFile(template);
  const candidates = [mainFile, templateMain, "src/index.js", "index.js"];
  for (const candidate of candidates) {
    if (directory[candidate]) {
      return candidate;
    }
  }
  return mainFile || templateMain;
}
// Templates that run on cloud ("v2") infrastructure rather than the
// in-browser bundler.
const CLOUD_TEMPLATES = [
  "adonis",
  "vue-cli",
  "svelte",
  "angular-cli",
  "cxjs",
  "gatsby",
  "nuxt",
  "next",
  "reason",
  "apollo",
  "sapper",
  "ember",
  "nest",
  "styleguidist",
  "gridsome",
  "vuepress",
  "mdx-deck",
  "quasar",
  "docusaurus",
  "remix",
  "node",
];
/**
 * Whether the given template executes as a cloud ("v2") sandbox.
 */
function isCloudTemplate(template: ITemplate): boolean {
  return CLOUD_TEMPLATES.some((cloudTemplate) => cloudTemplate === template);
}
/**
 * Resolves the sandbox metadata (title, description, tags, icon).
 * Fields in .codesandbox/template.json take precedence; package.json
 * fields (name/title, description, keywords, iconUrl) are the fallback.
 *
 * @throws when package.json exists but is a directory
 */
function getSandboxMetadata(directory: INormalizedModules): {
  title: string;
  description: string;
  tags: string[];
  iconUrl?: string;
} {
  const packageJson = directory["package.json"];
  if (packageJson && packageJson.type === "directory") {
    throw new Error("package.json is a directory");
  }
  const parsedPkg = packageJson ? JSON.parse(packageJson.content) : null;
  const fromPackageJson = {
    title: parsedPkg?.title || parsedPkg?.name,
    description: parsedPkg?.description,
    tags: parsedPkg?.keywords || [],
    iconUrl: parsedPkg?.iconUrl,
  };
  const templateInfo = directory[".codesandbox/template.json"];
  if (!templateInfo || !("content" in templateInfo)) {
    return fromPackageJson;
  }
  const templateContent = JSON.parse(templateInfo.content);
  return {
    title: templateContent.title || fromPackageJson.title,
    description: templateContent.description || fromPackageJson.description,
    tags: templateContent.tags || fromPackageJson.tags,
    iconUrl: templateContent.iconUrl || fromPackageJson.iconUrl,
  };
}
/**
 * Gets the prefilled environment variables by parsing either /.env
 * (preferred) or /.env.example.
 */
function getEnvironmentVariables(directory: INormalizedModules) {
  const envFile = directory[".env"] || directory[".env.example"];
  if (envFile && envFile.type === "file") {
    return parseEnv(envFile.content);
  }
  return {};
}
/**
 * Creates all relevant data for creating a sandbox: template detection,
 * entry file resolution, metadata, environment variables, and the
 * denormalized modules/directories.
 *
 * @export SandboxObject
 * @param {INormalizedModules} directory normalized map of path -> module
 * @throws when package.json exists but is a directory
 */
export default async function createSandbox(
  directory: INormalizedModules
): Promise<ISandbox> {
  const packageJson = directory["package.json"];
  if (packageJson && packageJson.type === "directory") {
    throw new Error("package.json is a directory");
  }
  const parsedPackageJson = packageJson
    ? JSON.parse(packageJson.content)
    : null;
  let template = getTemplate(parsedPackageJson, directory);
  if (template === undefined) {
    console.log("Got undefined template, defaulting to 'create-react-app'");
    template = "create-react-app";
  } else {
    console.log(`Creating sandbox with template '${template}'`);
  }
  const pkg = parsedPackageJson || { main: "/index.html" };
  const mainFileUnix = findMainFile(directory, pkg.main, template);
  // Normalize the entry path separator for the host platform.
  const mainFile =
    process.platform === "win32"
      ? mainFileUnix.replace(/\//g, "\\")
      : mainFileUnix;
  const metadata = getSandboxMetadata(directory);
  const { modules, directories } = denormalize(directory);
  return {
    title: metadata.title,
    description: metadata.description,
    tags: metadata.tags,
    modules,
    directories,
    externalResources: [],
    environmentVariables: getEnvironmentVariables(directory),
    template,
    entry: mainFile,
    v2: isCloudTemplate(template),
    templateParams: metadata.iconUrl
      ? {
          iconUrl: metadata.iconUrl,
        }
      : undefined,
  };
}
================================================
FILE: packages/import-utils/src/create-sandbox/templates.ts
================================================
import { INormalizedModules, ITemplate } from "codesandbox-import-util-types";
/**
 * Returns the conventional entry file for a sandbox template.
 * Container/cloud templates resolve to package.json; client templates
 * resolve to their framework's default entry point; anything unknown
 * falls back to src/index.js.
 */
export function getMainFile(template: ITemplate) {
  switch (template) {
    case "adonis":
      return "server.js";
    case "vue-cli":
      return "src/main.js";
    case "angular-cli":
      return "src/main.ts";
    case "create-react-app-typescript":
      return "src/main.tsx";
    case "parcel":
    case "static":
      return "index.html";
    case "gatsby":
      return "src/pages/index.js";
    case "gridsome":
      return "src/pages/Index.vue";
    case "mdx-deck":
      return "deck.mdx";
    case "quasar":
      return "src/pages/Index.vue";
    // Container templates use package.json as the entry.
    // (The original switch listed "styleguidist" twice; the duplicate
    // dead case label has been removed.)
    case "styleguidist":
    case "nuxt":
    case "next":
    case "apollo":
    case "reason":
    case "sapper":
    case "nest":
    case "remix":
    case "vuepress":
      return "package.json";
    default:
      return "src/index.js";
  }
}
// Path of the per-sandbox configuration file that may pin a template.
const SANDBOX_CONFIG = "sandbox.config.json";
// Path of the template definition file that may pin a runtime.
const TEMPLATE_CONFIG = ".codesandbox/template.json";
// Above this many dependencies the client bundler is assumed to
// struggle, so detection falls back to a "node" (server) sandbox.
const MAX_CLIENT_DEPENDENCY_COUNT = 50;
// Map of package name -> semver range, as found in package.json.
type Dependencies = { [name: string]: string };
// The subset of package.json fields used for template detection.
type PackageJSON = {
  dependencies?: Dependencies;
  devDependencies?: Dependencies;
};
/**
 * Detects which sandbox template a project should use.
 *
 * Resolution order matters and is preserved exactly: explicit
 * configuration (sandbox.config.json, then .codesandbox/template.json)
 * wins, then container indicators (Dockerfile/devcontainer), then
 * framework dependency checks from most to least specific. Returns
 * undefined when nothing matches.
 */
export function getTemplate(
  pkg: PackageJSON | null,
  modules: INormalizedModules
): ITemplate | undefined {
  // 1. Explicit template in sandbox.config.json (root or "/"-prefixed).
  const sandboxConfig =
    modules[SANDBOX_CONFIG] || modules[`/${SANDBOX_CONFIG}`];
  if (sandboxConfig && sandboxConfig.type !== "directory") {
    try {
      const parsed = JSON.parse(sandboxConfig.content);
      if (parsed.template) {
        return parsed.template;
      }
    } catch (e) {}
  }
  // 2. Explicit runtime in .codesandbox/template.json.
  const templateConfig =
    modules[TEMPLATE_CONFIG] || modules[`/${TEMPLATE_CONFIG}`];
  if (templateConfig && templateConfig.type !== "directory") {
    try {
      const parsed = JSON.parse(templateConfig.content);
      if (parsed.runtime) {
        return parsed.runtime;
      }
    } catch (e) {}
  }
  // 3. Dockerfile / devcontainer projects always run in a VM.
  if (
    ".codesandbox/Dockerfile" in modules ||
    ".devcontainer/devcontainer.json" in modules
  ) {
    // We should return "cloud" here, once the server supports it.
    return "node";
  }
  // 4. No package.json at all: plain static files.
  if (!pkg) {
    return "static";
  }
  const { dependencies = {}, devDependencies = {} } = pkg;
  const allDependencies = [
    ...Object.keys(dependencies),
    ...Object.keys(devDependencies),
  ];
  const moduleNames = Object.keys(modules);
  // True when the project depends on the given package (deps or devDeps).
  const has = (name: string) => allDependencies.indexOf(name) > -1;
  // True when the project depends on any of the given packages.
  const hasAny = (names: string[]) => names.some((name) => has(name));
  if (hasAny(["@adonisjs/framework", "@adonisjs/core"])) {
    return "adonis";
  }
  if (hasAny(["nuxt", "nuxt-edge", "nuxt-ts", "nuxt-ts-edge", "nuxt3"])) {
    return "nuxt";
  }
  if (has("next")) {
    return "next";
  }
  if (
    hasAny([
      "apollo-server",
      "apollo-server-express",
      "apollo-server-hapi",
      "apollo-server-koa",
      "apollo-server-lambda",
      "apollo-server-micro",
    ])
  ) {
    return "apollo";
  }
  if (has("mdx-deck")) {
    return "mdx-deck";
  }
  if (has("gridsome")) {
    return "gridsome";
  }
  if (has("vuepress")) {
    return "vuepress";
  }
  if (has("ember-cli")) {
    return "ember";
  }
  if (has("sapper")) {
    return "sapper";
  }
  if (has("gatsby")) {
    return "gatsby";
  }
  if (has("quasar")) {
    return "quasar";
  }
  if (has("@docusaurus/core")) {
    return "docusaurus";
  }
  if (has("remix")) {
    return "remix";
  }
  if (has("astro")) {
    return "node";
  }
  if (has("vite")) {
    if (has("react-redux")) {
      // Pretty bad hack to ensure that the examples of Redux
      // still run in the old embed: https://github.com/codesandbox/codesandbox-client/issues/8282
      //
      // We should remove this once either:
      // 1. the existing embed works with VMs
      // 2. our new embeds support all query params
      return "create-react-app";
    }
    return "node";
  }
  if (
    hasAny([
      "@tanstack/start",
      "@tanstack/solid-start",
      "@tanstack/react-start",
    ])
  ) {
    return "node";
  }
  if (has("vanjs-core")) {
    return "node";
  }
  if (has("mini-van-plate")) {
    return "node";
  }
  // CLIENT
  // ReasonML projects are detected by file extension, not dependencies.
  if (moduleNames.some((m) => m.endsWith(".re"))) {
    return "reason";
  }
  if (hasAny(["parcel-bundler", "parcel"])) {
    return "parcel";
  }
  if (hasAny(["@dojo/core", "@dojo/framework"])) {
    return "@dojo/cli-create-app";
  }
  if (has("@nestjs/core") || has("@nestjs/common")) {
    return "nest";
  }
  if (has("react-styleguidist")) {
    return "styleguidist";
  }
  // Matches react-scripts and scoped forks like @fork/react-scripts.
  if (
    allDependencies.some((dependency) =>
      /^(@[\w-]+\/)?react-scripts$/.test(dependency)
    )
  ) {
    return "create-react-app";
  }
  if (has("react-scripts-ts")) {
    return "create-react-app-typescript";
  }
  if (has("@angular/core")) {
    return "angular-cli";
  }
  if (has("preact-cli")) {
    return "preact-cli";
  }
  if (has("@sveltech/routify") || has("@roxi/routify")) {
    return "node";
  }
  if (has("@frontity/core")) {
    return "node";
  }
  if (has("svelte")) {
    return "svelte";
  }
  if (has("vue")) {
    return "vue-cli";
  }
  if (has("cx")) {
    return "cxjs";
  }
  // Generic server-side indicators.
  if (
    hasAny([
      "express",
      "koa",
      "nodemon",
      "ts-node",
      "@tensorflow/tfjs-node",
      "webpack-dev-server",
      "snowpack",
    ])
  ) {
    return "node";
  }
  if (Object.keys(dependencies).length >= MAX_CLIENT_DEPENDENCY_COUNT) {
    // The dependencies are too much for client sandboxes to handle
    return "node";
  }
  return undefined;
}
================================================
FILE: packages/import-utils/src/create-sandbox/utils/__tests__/__snapshots__/resolve.test.ts.snap
================================================
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`resolve getDirectoryPaths can resolve dir paths 1`] = `
{
"/world": {
"directoryShortid": undefined,
"shortid": "dir1",
"title": "world",
},
"/world/hello": {
"directoryShortid": "dir1",
"shortid": "dir2",
"title": "hello",
},
}
`;
================================================
FILE: packages/import-utils/src/create-sandbox/utils/__tests__/extract-requires.test.ts
================================================
import extractRequires from "../extract-requires";
describe("extractRequires", () => {
it("can find simple requires", () => {
const code = `
import React from 'react';
`;
expect(extractRequires(code)).toEqual(["react"]);
});
it("can find require statements", () => {
const code = `
const react = require('react');
`;
expect(extractRequires(code)).toEqual(["react"]);
});
it("can find dynamic require statements", () => {
const code = `
const react = import('react');
`;
expect(extractRequires(code)).toEqual(["react"]);
});
it("can find multiple statements", () => {
const code = `
import angular from 'angular';
import test from './test';
const react = import('react');
function run() {
const a = require('./test2');
}
`;
expect(extractRequires(code)).toEqual([
"angular",
"./test",
"react",
"./test2",
]);
});
it("can find import promises", () => {
const code = `
const reactDom = import('react-dom').then(dom => dom.render('a'));
`;
expect(extractRequires(code)).toEqual(["react-dom"]);
});
});
================================================
FILE: packages/import-utils/src/create-sandbox/utils/__tests__/resolve.test.ts
================================================
import { getDirectoryPaths } from "../resolve";
describe("resolve", () => {
  describe("getDirectoryPaths", () => {
    it("can resolve dir paths", () => {
      // Two nested directories: "world" at the root (no parent shortid)
      // and "hello" inside it. The expected path map ("/world" and
      // "/world/hello") lives in the snapshot next to this test.
      const existingDirs = [
        {
          directoryShortid: undefined,
          title: "world",
          shortid: "dir1",
        },
        {
          directoryShortid: "dir1",
          title: "hello",
          shortid: "dir2",
        },
      ];
      expect(getDirectoryPaths(existingDirs)).toMatchSnapshot();
    });
  });
});
================================================
FILE: packages/import-utils/src/create-sandbox/utils/extract-requires.ts
================================================
import * as acorn from "acorn";
import * as babel from "@babel/core";
import traverse from "@babel/traverse";
import { ImportDeclaration, CallExpression, Literal } from "estree";
// Legacy acorn setup (walker + plugin injections).
// NOTE(review): the implementation below parses with Babel, not acorn —
// `walk`, the injected acorn plugins and ECMA_VERSION all appear unused
// here; confirm nothing relies on these injection side effects before
// removing them.
const walk = require("acorn/dist/walk");
require("acorn-dynamic-import/lib/inject").default(acorn);
require("acorn-jsx/inject")(acorn);
require("acorn-object-spread/inject")(acorn);
const ECMA_VERSION = 2017;
// Babel configuration used to normalize source before scanning it.
// The presets/plugins rewrite JSX, class properties, decorators,
// object rest/spread and dynamic `import()` so that every dependency
// surfaces as a plain `require(...)` call in the transformed AST
// (see the `import('react')` cases in extract-requires.test.ts).
const config = {
  presets: [require("babel-preset-env"), require("babel-preset-react")],
  plugins: [
    require("babel-plugin-transform-async-to-generator"),
    require("babel-plugin-transform-object-rest-spread"),
    require("babel-plugin-transform-class-properties"),
    require("babel-plugin-transform-decorators-legacy").default,
    require("babel-plugin-dynamic-import-node").default,
  ],
};
/**
 * Extracts all module specifiers that `code` depends on.
 *
 * The code is first transpiled with Babel (see `config` above), which
 * rewrites ESM `import` statements and dynamic `import()` calls to
 * CommonJS `require(...)` calls, so a single AST pass over `require`
 * call expressions finds every static and dynamic dependency.
 *
 * Note: previously named `exportRequires`, which was a typo — the
 * function extracts requires, and consumers (e.g. the test file)
 * import the default export as `extractRequires`.
 *
 * @param code source text of a single module
 * @returns the required specifiers in source order; empty when the code
 *          cannot be parsed (the error is logged, not thrown)
 */
export default function extractRequires(code: string): string[] {
  const requires: string[] = [];

  try {
    const { ast } = babel.transformSync(code, config)!;

    if (ast) {
      traverse(ast, {
        enter(path: any) {
          const node = path.node;
          if (
            node.type === "CallExpression" &&
            node.callee.name === "require" &&
            node.arguments[0] &&
            // Only static specifiers: `require(someVariable)` is ignored.
            node.arguments[0].type === "StringLiteral"
          ) {
            requires.push(node.arguments[0].value);
          }
        },
      });
    }
  } catch (e) {
    // Best effort: unparsable code yields an empty dependency list.
    console.error(e);
  }

  return requires;
}
================================================
FILE: packages/import-utils/src/create-sandbox/utils/resolve.ts
================================================
import { ISandboxDirectory } from "codesandbox-import-util-types";
/**
 * Maps every directory to its absolute path ("/parent/child"), following
 * the parent/child links encoded in `directoryShortid`.
 */
export function getDirectoryPaths(directories: ISandboxDirectory[]) {
  const paths: { [p: string]: ISandboxDirectory } = {};

  // Depth-first walk: register `dir` under `parentPath`, then descend into
  // every directory that names `dir` as its parent.
  const visit = (dir: ISandboxDirectory, parentPath: string) => {
    const currentPath = parentPath + "/" + dir.title;
    paths[currentPath] = dir;

    for (const child of directories) {
      if (child.directoryShortid === dir.shortid) {
        visit(child, currentPath);
      }
    }
  };

  // Roots are the directories without a parent (null or undefined).
  for (const root of directories) {
    if (root.directoryShortid == null) {
      visit(root, "");
    }
  }

  return paths;
}
================================================
FILE: packages/import-utils/src/index.ts
================================================
// stub
================================================
FILE: packages/import-utils/src/is-text.ts
================================================
import { isText as _isText } from "istextorbinary";
// Source files we always treat as text (js / jsx / ts / tsx).
const jsRegex = /(t|j)sx?$/i;
// Asset extensions that are always stored as binary (optionally followed
// by a query string, e.g. "logo.png?v=2").
const FILE_LOADER_REGEX =
  /\.(ico|jpg|png|gif|eot|otf|webp|ttf|woff|woff2|mp4|webm)(\?.*)?$/i;
// Upper bound for files stored as text; larger files are treated as binary.
export const MAX_FILE_SIZE = 3 * 1024 * 1024; // 3 MB
/**
 * Decides whether a file should be stored as text (rather than binary).
 *
 * JS/TS sources are always text. Any other file is text only when it is
 * not a known binary asset extension, is not too big, contains no null
 * bytes and is detected as text by `istextorbinary`.
 */
export const isText = (filename: string, buffer: Buffer) => {
  // Source files are always treated as text, regardless of content.
  if (jsRegex.test(filename)) {
    return true;
  }

  // Cheap rejections first: known binary extensions and oversized files.
  if (FILE_LOADER_REGEX.test(filename) || isTooBig(buffer)) {
    return false;
  }

  // We don't support null bytes in the database with postgres, so we need
  // to mark the file as binary if there are null bytes. Scanning the raw
  // bytes avoids decoding the whole buffer into a string first.
  if (buffer.includes(0)) {
    return false;
  }

  return _isText(filename, buffer);
};
// True when the buffer exceeds the maximum size we store as text.
export const isTooBig = (buffer: Buffer) => buffer.length > MAX_FILE_SIZE;
================================================
FILE: packages/import-utils/src/utils/files/__tests__/__snapshots__/denormalize.test.ts.snap
================================================
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`denormalize can create nested directories 1`] = `
{
"directories": [
{
"directoryShortid": "GXOoy",
"shortid": "0",
"title": "test",
},
],
"modules": [
{
"code": "",
"directoryShortid": "0",
"isBinary": false,
"sha": undefined,
"shortid": "1",
"title": "new-file.js",
"uploadId": undefined,
},
],
}
`;
exports[`denormalize can create only directory 1`] = `
{
"directories": [
{
"directoryShortid": "GXOoy",
"shortid": "0",
"title": "test",
},
{
"directoryShortid": "0",
"shortid": "1",
"title": "test2",
},
],
"modules": [],
}
`;
exports[`denormalize can denormalize 1`] = `
{
"directories": [
{
"directoryShortid": undefined,
"shortid": "0",
"title": "world",
},
{
"directoryShortid": "0",
"shortid": "1",
"title": "hello",
},
],
"modules": [
{
"code": "hello",
"directoryShortid": undefined,
"isBinary": false,
"sha": undefined,
"shortid": "2",
"title": "index.js",
"uploadId": undefined,
},
{
"code": "hello2",
"directoryShortid": "0",
"isBinary": false,
"sha": undefined,
"shortid": "3",
"title": "index.js",
"uploadId": undefined,
},
{
"code": "hello3",
"directoryShortid": "1",
"isBinary": false,
"sha": undefined,
"shortid": "4",
"title": "index.js",
"uploadId": "123",
},
],
}
`;
exports[`denormalize can filter out existing directories 1`] = `
{
"directories": [],
"modules": [
{
"code": "hello",
"directoryShortid": undefined,
"isBinary": false,
"sha": undefined,
"shortid": "0",
"title": "index.js",
"uploadId": undefined,
},
{
"code": "hello2",
"directoryShortid": "dir1",
"isBinary": false,
"sha": undefined,
"shortid": "1",
"title": "index.js",
"uploadId": undefined,
},
{
"code": "hello3",
"directoryShortid": "dir2",
"isBinary": false,
"sha": undefined,
"shortid": "2",
"title": "index.js",
"uploadId": undefined,
},
{
"code": "hello4",
"directoryShortid": "dir4",
"isBinary": false,
"sha": undefined,
"shortid": "3",
"title": "template.md",
"uploadId": undefined,
},
{
"code": "hello5",
"directoryShortid": "dir5",
"isBinary": false,
"sha": undefined,
"shortid": "4",
"title": "config.yml",
"uploadId": undefined,
},
],
}
`;
================================================
FILE: packages/import-utils/src/utils/files/__tests__/denormalize.test.ts
================================================
import denormalize from "../denormalize";

// Deterministic shortids: the mocked generator returns "0", "1", "2", …
// so the snapshots are stable across runs.
let count = 0;
jest.mock("shortid", () => ({
  generate: () => "" + count++,
}));

describe("denormalize", () => {
  beforeEach(() => {
    // Restart the shortid sequence before every test.
    count = 0;
  });

  // Basic case: nested paths produce directories plus linked modules.
  it("can denormalize", () => {
    const paths = {
      "/index.js": { content: "hello", isBinary: false },
      "/world/index.js": { content: "hello2", isBinary: false },
      "/world/hello/index.js": {
        content: "hello3",
        isBinary: false,
        uploadId: "123",
      },
    };
    expect(denormalize(paths)).toMatchSnapshot();
  });

  // The leading slash in keys must not change the result.
  it("can denormalize with and without leading slash", () => {
    const paths = {
      "index.js": { content: "hello", isBinary: false },
      "world/index.js": { content: "hello2", isBinary: false },
      "world/hello/index.js": { content: "hello3", isBinary: false },
    };
    const slashPaths = {
      "/index.js": { content: "hello", isBinary: false },
      "/world/index.js": { content: "hello2", isBinary: false },
      "/world/hello/index.js": { content: "hello3", isBinary: false },
    };
    const firstDenormalize = denormalize(paths);
    // Reset the id sequence so both runs generate identical shortids.
    count = 0;
    const secondDenormalize = denormalize(slashPaths);
    expect(firstDenormalize).toEqual(secondDenormalize);
  });

  // Directories that already exist must be reused, not recreated.
  it("can filter out existing directories", () => {
    const paths = {
      "index.js": { content: "hello", isBinary: false },
      "world/index.js": { content: "hello2", isBinary: false },
      "world/hello/index.js": { content: "hello3", isBinary: false },
      ".github/ISSUE_TEMPLATES/template.md": {
        content: "hello4",
        isBinary: false,
      },
      ".github/workflows/config.yml": { content: "hello5", isBinary: false },
    };
    const existingDirs = [
      {
        directoryShortid: undefined,
        title: "world",
        shortid: "dir1",
      },
      {
        directoryShortid: "dir1",
        title: "hello",
        shortid: "dir2",
      },
      {
        directoryShortid: undefined,
        title: ".github",
        shortid: "dir3",
      },
      {
        directoryShortid: "dir3",
        title: "ISSUE_TEMPLATES",
        shortid: "dir4",
      },
      {
        directoryShortid: "dir3",
        title: "workflows",
        shortid: "dir5",
      },
    ];
    const denormalized = denormalize(paths, existingDirs);
    expect(denormalized).toMatchSnapshot();
    // Every directory already existed, so none should be created.
    expect(denormalized.directories).toEqual([]);
  });

  // A file under a missing subdirectory creates that subdirectory only.
  it("can create nested directories", () => {
    const paramFiles = {
      "/src/test/new-file.js": { isBinary: false, content: "" },
    };
    const existingDirs = [
      {
        directoryShortid: null,
        shortid: "rgkK4",
        title: "public",
      },
      {
        directoryShortid: null,
        shortid: "GXOoy",
        title: "src",
      },
    ];
    const denormalized = denormalize(paramFiles, existingDirs);
    expect(denormalized).toMatchSnapshot();
  });

  // An explicit directory entry creates directories without any module.
  it("can create only directory", () => {
    const paramFiles: { "/src/test/test2": { type: "directory" } } = {
      "/src/test/test2": { type: "directory" },
    };
    const existingDirs = [
      {
        directoryShortid: null,
        shortid: "rgkK4",
        title: "public",
      },
      {
        directoryShortid: null,
        shortid: "GXOoy",
        title: "src",
      },
    ];
    const denormalized = denormalize(paramFiles, existingDirs);
    expect(denormalized).toMatchSnapshot();
  });
});
================================================
FILE: packages/import-utils/src/utils/files/denormalize.ts
================================================
import { dirname, basename } from "path";
import {
INormalizedModules,
IModule,
ISandboxFile,
ISandboxDirectory,
IBinaryModule,
} from "codesandbox-import-util-types";
import { generate as generateShortid } from "shortid";
import { getDirectoryPaths } from "../../create-sandbox/utils/resolve";
/**
 * Builds an `ISandboxFile` record for one normalized module, assigning it
 * a freshly generated shortid and linking it to its parent directory.
 */
function generateSandboxFile(
  module: IModule | IBinaryModule,
  path: string,
  parentDirectoryShortid?: string
): ISandboxFile {
  const file: ISandboxFile = {
    shortid: generateShortid(),
    code: module.content,
    directoryShortid: parentDirectoryShortid,
    title: basename(path),
    uploadId: module.uploadId,
    isBinary: module.isBinary,
    sha: module.sha,
  };

  // Binary modules additionally carry their raw payload along.
  return "binaryContent" in module
    ? { ...file, binaryContent: module.binaryContent }
    : file;
}
/**
 * Ensures a sandbox directory exists for `path`, creating any missing
 * parent directories first.
 *
 * `directories` maps root-relative, slash-separated paths (no leading
 * slash) to their sandbox directory and is mutated in place.
 *
 * Fixes two inconsistencies of the previous version: the root-level
 * branch returned `undefined` instead of the directory, and it passed
 * the full `path` as the title where all other branches use
 * `basename(path)` (equal for root paths, but inconsistent).
 *
 * @returns the directory registered for `path`
 */
function createDirectoryRecursively(
  path: string,
  directories: { [path: string]: ISandboxDirectory }
): ISandboxDirectory {
  if (directories[path]) {
    return directories[path];
  }

  const parentDir = dirname(path);

  // `dirname` returns "." for a top-level path, so this directory lives
  // directly in the sandbox root and has no parent shortid.
  if (parentDir === ".") {
    directories[path] = generateSandboxDirectory(basename(path), undefined);
    return directories[path];
  }

  if (!directories[parentDir]) {
    createDirectoryRecursively(parentDir, directories);
  }

  directories[path] = generateSandboxDirectory(
    basename(path),
    directories[parentDir].shortid
  );
  return directories[path];
}
// Creates a fresh sandbox directory entry with a generated shortid,
// optionally linked to a parent directory.
function generateSandboxDirectory(
  title: string,
  parentDirectoryShortid?: string
): ISandboxDirectory {
  const shortid = generateShortid();

  return {
    shortid,
    directoryShortid: parentDirectoryShortid,
    title,
  };
}
/**
 * Turns a flat path → module mapping into the `modules` / `directories`
 * shape stored for a sandbox.
 *
 * Directories already present in `existingDirs` are reused (their
 * shortids are kept and they are excluded from the returned
 * `directories`); any missing intermediate directories are created.
 *
 * @param paramFiles normalized modules keyed by path (leading "/" optional)
 * @param existingDirs directories that already exist in the sandbox
 */
export default function denormalize(
  paramFiles: INormalizedModules,
  existingDirs: ISandboxDirectory[] = []
) {
  const existingDirPathsParams = getDirectoryPaths(existingDirs);

  // Remove all leading slashes so every lookup below uses the same key form.
  const existingDirPaths: { [p: string]: ISandboxDirectory } = {};
  Object.keys(existingDirPathsParams).forEach((path) => {
    existingDirPaths[path.replace(/^\//, "")] = existingDirPathsParams[path];
  });

  const files: INormalizedModules = {};
  Object.keys(paramFiles).forEach((path) => {
    files[path.replace(/^\//, "")] = paramFiles[path];
  });

  // Collect every directory path that needs to exist.
  const directories: Set<string> = new Set();
  Object.keys(files).forEach((path) => {
    const dir = dirname(path);
    // BUG FIX: keys of `existingDirPaths` have their leading slash
    // stripped above, so the previous lookup of `"/" + dir` could never
    // match. It was only harmless because createDirectoryRecursively
    // also skips directories that already exist.
    if (dir !== "." && !existingDirPaths[dir]) {
      directories.add(dir);
    }

    // Explicit directory entries must exist even when empty.
    if (files[path].type === "directory") {
      directories.add(path);
    }
  });

  // Seed with the existing directories so they are reused, not recreated.
  const sandboxDirectories: { [path: string]: ISandboxDirectory } = {
    ...existingDirPaths,
  };
  directories.forEach((dirPath) => {
    createDirectoryRecursively(dirPath, sandboxDirectories);
  });

  const sandboxModules: ISandboxFile[] = Object.keys(files)
    .map((path) => {
      const dir = sandboxDirectories[dirname(path)];
      const parentShortid = dir ? dir.shortid : undefined;

      const fileOrDirectory = files[path];
      // Explicit directories have no file record of their own.
      if (fileOrDirectory.type === "directory") {
        return undefined;
      }
      return generateSandboxFile(fileOrDirectory, path, parentShortid);
    })
    .filter((x): x is ISandboxFile => x !== undefined);

  // Only report directories that did not exist before this call.
  const newDirectories: ISandboxDirectory[] = Object.keys(sandboxDirectories)
    .filter((path) => !existingDirPaths[path])
    .map((path) => sandboxDirectories[path]);

  return {
    modules: sandboxModules,
    directories: newDirectories,
  };
}
================================================
FILE: packages/import-utils/src/utils/files/normalize.ts
================================================
import { join } from "path";
import {
ISandboxFile,
ISandboxDirectory,
INormalizedModules,
} from "codesandbox-import-util-types";
/**
 * Recursively flattens a sandbox's module/directory lists into a mapping
 * of full file path → { content, isBinary }.
 *
 * Fixes a cosmetic defect: the previous version assigned the `void`
 * result of `Array.prototype.forEach` to an unused `childrenFiles`
 * variable, which was misleading.
 *
 * @param modules all files in the sandbox
 * @param directories all directories in the sandbox
 * @param currentDir shortid of the directory being visited (null = root)
 * @param path path prefix accumulated for `currentDir`
 */
function findSandboxFiles(
  modules: ISandboxFile[],
  directories: ISandboxDirectory[],
  currentDir: string | null,
  path: string = ""
): INormalizedModules {
  let result: INormalizedModules = {};

  // Files that live directly inside the current directory.
  for (const m of modules) {
    if (m.directoryShortid === currentDir) {
      result[join(path, m.title)] = {
        content: m.code || "",
        isBinary: m.isBinary,
      };
    }
  }

  // Recurse into each child directory and merge its files in.
  for (const dir of directories) {
    if (dir.directoryShortid === currentDir) {
      const dirResult = findSandboxFiles(
        modules,
        directories,
        dir.shortid,
        join(path, dir.title)
      );
      result = { ...result, ...dirResult };
    }
  }

  return result;
}
/**
 * Converts a sandbox's flat module/directory lists into a mapping of
 * full file path → { content, isBinary }, starting from the sandbox
 * root (directoryShortid === null).
 */
export default function normalizeSandboxFiles(
  modules: ISandboxFile[],
  directories: ISandboxDirectory[]
): INormalizedModules {
  return findSandboxFiles(modules, directories, null);
}
================================================
FILE: packages/import-utils/tsconfig.json
================================================
{
"compilerOptions": {
/* Basic Options */
"target": "es5" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,
"lib": [
"es5",
"es2015",
"dom"
] /* Specify library files to be included in the compilation: */,
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
"declaration": true /* Generates corresponding '.d.ts' file. */,
"sourceMap": true /* Generates corresponding '.map' file. */,
"declarationMap": true,
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./lib" /* Redirect output structure to the directory. */,
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true /* Enable all strict type-checking options. */,
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [] /* List of folders to include type definitions from. */
// "types": [] /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
/* Source Map Options */
// "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
"skipLibCheck": true,
"useUnknownInCatchVariables": false
},
"include": ["src/**/*.ts"],
"exclude": [
"node_modules",
"build",
"**/*.test.ts",
"temp",
"**/__mocks__",
"**/__tests__"
]
}
================================================
FILE: packages/types/LICENSE
================================================
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2018 CodeSandbox BV. <https://codesandbox.io/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
================================================
FILE: packages/types/index.d.ts
================================================
/** A single file entry in normalized (path-keyed) form. */
export interface IModule {
  content: string; // If isBinary is true this will be a URL
  isBinary: boolean;
  type?: "file";
  uploadId?: string;
  sha?: string;
}

/** A module whose raw binary payload is carried inline. */
export interface IBinaryModule extends IModule {
  binaryContent: string;
}

/** Marker entry for a directory in a normalized tree (may be empty). */
export interface IDirectory {
  type: "directory";
}

/** Mapping of file/directory path to its entry. */
export interface INormalizedModules {
  [path: string]: IModule | IBinaryModule | IDirectory;
}

/** A file as stored on a sandbox (denormalized form). */
export interface ISandboxFile {
  title: string;
  code: string;
  shortid: string;
  isBinary: boolean;
  binaryContent?: string;
  uploadId?: string;
  // Shortid of the parent directory; null/undefined means the sandbox root.
  directoryShortid: string | undefined | null;
  sha?: string;
}

/** A directory as stored on a sandbox. */
export interface ISandboxDirectory {
  shortid: string;
  title: string;
  // Shortid of the parent directory; null/undefined means the sandbox root.
  directoryShortid: string | undefined | null;
}

/** Identifiers of the sandbox templates known to the importer. */
export type ITemplate =
  | "adonis"
  | "vue-cli"
  | "preact-cli"
  | "svelte"
  | "create-react-app-typescript"
  | "create-react-app"
  | "angular-cli"
  | "parcel"
  | "@dojo/cli-create-app"
  | "cxjs"
  | "gatsby"
  | "nuxt"
  | "next"
  | "reason"
  | "apollo"
  | "sapper"
  | "ember"
  | "nest"
  | "static"
  | "styleguidist"
  | "gridsome"
  | "vuepress"
  | "mdx-deck"
  | "quasar"
  | "docusaurus"
  | "remix"
  | "node";

/** Full payload describing a sandbox to create or update. */
export interface ISandbox {
  title: string;
  description: string;
  tags: string[];
  modules: ISandboxFile[];
  directories: ISandboxDirectory[];
  externalResources: string[];
  template: ITemplate;
  entry: string;
  environmentVariables: Record<string, string>;
  v2?: boolean;
  templateParams?: {
    iconUrl?: string;
  };
}
================================================
FILE: packages/types/package.json
================================================
{
"name": "codesandbox-import-util-types",
"version": "2.2.3",
"gitHead": "3cdcdea389d39f2a92be73dcb73496f68c8ada41"
}
================================================
FILE: tsconfig.json
================================================
{
"compilerOptions": {
/* Basic Options */
"target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'commonjs', 'amd', 'system', 'umd' or 'es2015'. */,
"lib": [
"es2015",
"dom"
] /* Specify library files to be included in the compilation: */,
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
"sourceMap": true /* Generates corresponding '.map' file. */,
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./dist" /* Redirect output structure to the directory. */,
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true /* Enable all strict type-checking options. */,
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [] /* List of folders to include type definitions from. */
// "types": [] /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
/* Source Map Options */
// "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
"skipLibCheck": true,
"useUnknownInCatchVariables": false
},
"include": ["src/**/*.ts", "typings/**/*.ts"],
"exclude": ["node_modules", "__tests__", "build", "**/*.test.ts", "temp"]
}
gitextract__6b2bp8o/ ├── .codesandbox/ │ └── tasks.json ├── .dockerignore ├── .eslintrc.js ├── .github/ │ └── workflows/ │ └── build-image.yml ├── .gitignore ├── .prettierrc ├── Dockerfile ├── LICENSE ├── catalog-info.yaml ├── lerna.json ├── package.json ├── packages/ │ ├── cli/ │ │ ├── .gitignore │ │ ├── LICENSE │ │ ├── README.md │ │ ├── package.json │ │ ├── src/ │ │ │ ├── api/ │ │ │ │ └── define.ts │ │ │ ├── cfg.ts │ │ │ ├── commands/ │ │ │ │ ├── deploy.ts │ │ │ │ ├── login.ts │ │ │ │ ├── logout.ts │ │ │ │ └── token.ts │ │ │ ├── github/ │ │ │ │ └── url.ts │ │ │ ├── index.ts │ │ │ └── utils/ │ │ │ ├── api.ts │ │ │ ├── confirm.ts │ │ │ ├── env.ts │ │ │ ├── log.ts │ │ │ ├── parse-sandbox/ │ │ │ │ ├── file-error.ts │ │ │ │ ├── index.ts │ │ │ │ └── upload-files.ts │ │ │ └── url.ts │ │ ├── tsconfig.json │ │ ├── tslint.json │ │ └── typings/ │ │ └── extensions/ │ │ └── json.d.ts │ ├── git-extractor/ │ │ ├── .gitignore │ │ ├── config/ │ │ │ └── .gitkeep │ │ ├── package.json │ │ ├── src/ │ │ │ ├── index.ts │ │ │ ├── middleware/ │ │ │ │ ├── appsignal.ts │ │ │ │ ├── camelize.ts │ │ │ │ ├── decamelize.ts │ │ │ │ ├── error-handler.ts │ │ │ │ ├── logger.ts │ │ │ │ └── not-found.ts │ │ │ ├── routes/ │ │ │ │ ├── define.test.ts │ │ │ │ ├── define.ts │ │ │ │ └── github/ │ │ │ │ ├── api.ts │ │ │ │ ├── index.ts │ │ │ │ ├── pull/ │ │ │ │ │ └── download.ts │ │ │ │ ├── push/ │ │ │ │ │ ├── index.ts │ │ │ │ │ └── utils/ │ │ │ │ │ ├── __tests__/ │ │ │ │ │ │ └── delta.test.ts │ │ │ │ │ ├── create-blobs.ts │ │ │ │ │ └── delta.ts │ │ │ │ └── types.d.ts │ │ │ └── utils/ │ │ │ ├── appsignal.ts │ │ │ ├── delay.ts │ │ │ ├── env.ts │ │ │ └── log.ts │ │ └── tsconfig.json │ ├── hmaeo.yml │ ├── import-utils/ │ │ ├── .gitignore │ │ ├── LICENSE │ │ ├── package.json │ │ ├── src/ │ │ │ ├── api/ │ │ │ │ └── define.ts │ │ │ ├── create-sandbox/ │ │ │ │ ├── __mocks__/ │ │ │ │ │ └── pacote.ts │ │ │ │ ├── __tests__/ │ │ │ │ │ ├── __snapshots__/ │ │ │ │ │ │ └── html-parser.test.ts.snap │ │ │ │ │ ├── 
html-parser.test.ts │ │ │ │ │ └── templates.test.ts │ │ │ │ ├── html-parser.ts │ │ │ │ ├── index.ts │ │ │ │ ├── templates.ts │ │ │ │ └── utils/ │ │ │ │ ├── __tests__/ │ │ │ │ │ ├── __snapshots__/ │ │ │ │ │ │ └── resolve.test.ts.snap │ │ │ │ │ ├── extract-requires.test.ts │ │ │ │ │ └── resolve.test.ts │ │ │ │ ├── extract-requires.ts │ │ │ │ └── resolve.ts │ │ │ ├── index.ts │ │ │ ├── is-text.ts │ │ │ └── utils/ │ │ │ └── files/ │ │ │ ├── __tests__/ │ │ │ │ ├── __snapshots__/ │ │ │ │ │ └── denormalize.test.ts.snap │ │ │ │ └── denormalize.test.ts │ │ │ ├── denormalize.ts │ │ │ └── normalize.ts │ │ └── tsconfig.json │ └── types/ │ ├── LICENSE │ ├── index.d.ts │ └── package.json └── tsconfig.json
SYMBOL INDEX (181 symbols across 34 files)
FILE: packages/cli/src/cfg.ts
constant TTL (line 12) | const TTL = ms("8h");
type IUser (line 14) | interface IUser {
type IConfig (line 23) | interface IConfig {
constant CONFIG_NAME (line 29) | const CONFIG_NAME = IS_STAGING
function save (line 42) | async function save(data: object) {
function read (line 49) | async function read(): Promise<IConfig> {
function remove (line 84) | async function remove(key: string) {
function merge (line 96) | async function merge(data: object) {
function deleteUser (line 109) | async function deleteUser() {
function saveUser (line 120) | function saveUser(token: string, user: IUser) {
function getUser (line 130) | async function getUser(): Promise<IUser | undefined> {
function getToken (line 135) | async function getToken(): Promise<string | undefined> {
FILE: packages/cli/src/commands/deploy.ts
constant MAX_MODULE_COUNT (line 21) | const MAX_MODULE_COUNT = 500;
constant MAX_DIRECTORY_COUNT (line 22) | const MAX_DIRECTORY_COUNT = 500;
function showWarnings (line 31) | async function showWarnings(resolvedPath: string, errors: FileError[]) {
function showUploads (line 50) | async function showUploads(resolvedPath: string, uploads: IUploads) {
function registerCommand (line 72) | function registerCommand(program: typeof Commander) {
FILE: packages/cli/src/commands/login.ts
function handleSignIn (line 23) | async function handleSignIn() {
function login (line 53) | async function login() {
function registerCLI (line 74) | function registerCLI(program: typeof Commander) {
FILE: packages/cli/src/commands/logout.ts
function registerCLI (line 7) | function registerCLI(program: typeof Commander) {
FILE: packages/cli/src/commands/token.ts
function registerToken (line 6) | function registerToken(program: typeof Commander) {
FILE: packages/cli/src/github/url.ts
type IOptions (line 10) | interface IOptions {
function optionsToParameterizedUrl (line 30) | function optionsToParameterizedUrl(options: { [option: string]: any }) {
function getUrlOptions (line 42) | function getUrlOptions(options: IOptions) {
constant CODESANDBOX_ROOT (line 106) | const CODESANDBOX_ROOT = `https://codesandbox.io`;
function findGitRoot (line 108) | function findGitRoot() {
function getRepoPath (line 125) | function getRepoPath(options: IOptions) {
function getFullUrl (line 168) | function getFullUrl(type: "s" | "embed", options: IOptions) {
function getSandboxUrl (line 175) | function getSandboxUrl(options?: IOptions) {
function getEmbedUrl (line 179) | function getEmbedUrl(options?: IOptions) {
FILE: packages/cli/src/utils/api.ts
function uploadSandbox (line 29) | async function uploadSandbox(sandbox: ISandbox) {
function fetchUser (line 55) | async function fetchUser(token: string) {
function verifyUser (line 68) | async function verifyUser(token: string) {
function createUpload (line 77) | async function createUpload(filename: string, buffer: Buffer) {
FILE: packages/cli/src/utils/confirm.ts
function confirm (line 3) | async function confirm(question: string, defaultNo = false) {
FILE: packages/cli/src/utils/env.ts
constant IS_STAGING (line 1) | const IS_STAGING = process.env.CODESANDBOX_NODE_ENV === "development";
FILE: packages/cli/src/utils/log.ts
function log (line 3) | function log(text = "") {
function logCodeSandbox (line 7) | function logCodeSandbox() {
function extraHelp (line 16) | function extraHelp() {
function info (line 35) | function info(text: string) {
function error (line 39) | function error(text: string) {
function warn (line 45) | function warn(text: string) {
function success (line 49) | function success(text: string) {
FILE: packages/cli/src/utils/parse-sandbox/file-error.ts
class FileError (line 1) | class FileError extends Error {
method constructor (line 12) | constructor(message: string, path: string, isBinary = false) {
FILE: packages/cli/src/utils/parse-sandbox/index.ts
constant MAX_FILE_SIZE (line 8) | const MAX_FILE_SIZE = 5 * 1024 * 1024;
type IUploads (line 10) | interface IUploads {
function normalizeFilesInDirectory (line 14) | async function normalizeFilesInDirectory(
function parseSandbox (line 113) | async function parseSandbox(resolvedPath: string) {
FILE: packages/cli/src/utils/parse-sandbox/upload-files.ts
function uploadFiles (line 5) | async function uploadFiles(uploads: IUploads) {
FILE: packages/cli/src/utils/url.ts
constant BASE_URL (line 3) | const BASE_URL = IS_STAGING
constant CREATE_SANDBOX_URL (line 7) | const CREATE_SANDBOX_URL = BASE_URL + "/api/v1/sandboxes";
constant CREATE_UPLOAD_URL (line 8) | const CREATE_UPLOAD_URL =
constant GET_USER_URL (line 10) | const GET_USER_URL = BASE_URL + "/api/v1/users/current";
constant LOGIN_URL (line 11) | const LOGIN_URL = BASE_URL + "/cli/login";
constant VERIFY_USER_TOKEN_URL (line 13) | const VERIFY_USER_TOKEN_URL = BASE_URL + "/api/v1/auth/verify/";
FILE: packages/git-extractor/src/index.ts
constant DEFAULT_PORT (line 23) | const DEFAULT_PORT = process.env.PORT || 2000;
FILE: packages/git-extractor/src/middleware/logger.ts
type ILogParams (line 3) | interface ILogParams {
function log (line 10) | function log({ method, url, duration, error }: ILogParams) {
FILE: packages/git-extractor/src/routes/github/api.ts
constant API_URL (line 10) | const API_URL = "https://api.github.com";
constant REPO_BASE_URL (line 11) | const REPO_BASE_URL = API_URL + "/repos";
constant GITHUB_CLIENT_ID (line 13) | const GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID;
constant GITHUB_CLIENT_SECRET (line 14) | const GITHUB_CLIENT_SECRET = process.env.GITHUB_CLIENT_SECRET;
constant NOT_FOUND_MESSAGE (line 16) | const NOT_FOUND_MESSAGE =
function buildRepoApiUrl (line 19) | function buildRepoApiUrl(username: string, repo: string) {
function buildPullApiUrl (line 23) | function buildPullApiUrl(username: string, repo: string, pull: number) {
function buildCommitApiUrl (line 27) | function buildCommitApiUrl(username: string, repo: string, commitSha: st...
function buildTreesApiUrl (line 31) | function buildTreesApiUrl(username: string, repo: string, treeSha: strin...
function buildContentsApiUrl (line 35) | function buildContentsApiUrl(username: string, repo: string, path: strin...
function requestAxios (line 39) | function requestAxios(
function buildCompareApiUrl (line 86) | function buildCompareApiUrl(
function createAxiosRequestConfig (line 95) | function createAxiosRequestConfig(token?: string): AxiosRequestConfig {
function buildContentsUrl (line 110) | function buildContentsUrl(
function buildCommitsUrl (line 119) | function buildCommitsUrl(
function buildCommitsByPathUrl (line 128) | function buildCommitsByPathUrl(
type IRepoResponse (line 140) | interface IRepoResponse {
type ICompareResponse (line 149) | interface ICompareResponse {
type IContentResponse (line 169) | interface IContentResponse {
type ICommitResponse (line 175) | interface ICommitResponse {
type IPrResponse (line 183) | interface IPrResponse {
type IDeleteContentResponse (line 201) | interface IDeleteContentResponse {
function getComparison (line 207) | async function getComparison(
function getContent (line 227) | async function getContent(url: string, token: string) {
type RepoInfoCache (line 239) | type RepoInfoCache = {
function getRepo (line 247) | async function getRepo(username: string, repo: string, token?: string) {
function getTreeWithDeletedFiles (line 285) | async function getTreeWithDeletedFiles(
function getCommitTreeSha (line 348) | async function getCommitTreeSha(
function getLatestCommitShaOfFile (line 367) | async function getLatestCommitShaOfFile(
function isRepoPrivate (line 390) | async function isRepoPrivate(
type RightsResponse (line 400) | interface RightsResponse {
function fetchRights (line 411) | async function fetchRights(
type ITreeResponse (line 453) | interface ITreeResponse {
type IBlobResponse (line 460) | interface IBlobResponse {
function createPr (line 465) | async function createPr(
function createBlob (line 514) | async function createBlob(
type ICreateTreeResponse (line 531) | interface ICreateTreeResponse {
function createTree (line 537) | async function createTree(
type ICreateCommitResponse (line 557) | interface ICreateCommitResponse {
function createCommit (line 576) | async function createCommit(
type IUpdateReferenceResponse (line 597) | interface IUpdateReferenceResponse {
function updateReference (line 602) | async function updateReference(
type ICreateReferenceResponse (line 624) | interface ICreateReferenceResponse {
function createReference (line 634) | async function createReference(
type ICreateForkResponse (line 653) | interface ICreateForkResponse {
function createFork (line 661) | async function createFork(
type ICreateRepoResponse (line 679) | interface ICreateRepoResponse {
function getDefaultBranch (line 689) | async function getDefaultBranch(
function createRepo (line 699) | async function createRepo(
function doesRepoExist (line 738) | async function doesRepoExist(
type CommitResponse (line 760) | interface CommitResponse {
function resetShaCache (line 777) | function resetShaCache(gitInfo: IGitInfo) {
function fetchRepoInfo (line 783) | async function fetchRepoInfo(
function fetchPullInfo (line 903) | async function fetchPullInfo(
constant MAX_ZIP_SIZE (line 942) | const MAX_ZIP_SIZE = 128 * 1024 * 1024;
function downloadZip (line 944) | async function downloadZip(
function checkRemainingRateLimit (line 987) | async function checkRemainingRateLimit(
FILE: packages/git-extractor/src/routes/github/pull/download.ts
function downloadRepository (line 36) | async function downloadRepository(
FILE: packages/git-extractor/src/routes/github/push/index.ts
type IGitInfo (line 11) | interface IGitInfo {
type ITreeFile (line 18) | interface ITreeFile {
type IChanges (line 27) | interface IChanges {
type ITree (line 41) | type ITree = ITreeFile[];
function generateBranchName (line 43) | function generateBranchName() {
function createBranch (line 48) | async function createBranch(
function createFork (line 65) | async function createFork(
function createInitialCommit (line 104) | async function createInitialCommit(
function createCommit (line 119) | async function createCommit(
function createRepo (line 175) | async function createRepo(
FILE: packages/git-extractor/src/routes/github/push/utils/create-blobs.ts
function downloadContent (line 7) | async function downloadContent(module: IModule): Promise<string> {
function createBlobs (line 17) | async function createBlobs(
FILE: packages/git-extractor/src/routes/github/push/utils/delta.ts
function getGitSha (line 6) | function getGitSha(content: string) {
type INormalizedAndDeletedModules (line 14) | interface INormalizedAndDeletedModules {
function getDelta (line 18) | function getDelta(
FILE: packages/git-extractor/src/routes/github/types.d.ts
type Module (line 1) | type Module = {
type NormalizedDirectory (line 13) | type NormalizedDirectory = {
type DownloadedFile (line 20) | type DownloadedFile = Module & {
FILE: packages/git-extractor/src/utils/delay.ts
function delay (line 1) | function delay(ms: number): Promise<void> {
FILE: packages/git-extractor/src/utils/log.ts
function log (line 8) | function log(message: string) {
FILE: packages/import-utils/src/api/define.ts
type IFiles (line 4) | interface IFiles {
function compress (line 11) | function compress(input: string) {
function getParameters (line 18) | function getParameters(parameters: {
FILE: packages/import-utils/src/create-sandbox/html-parser.ts
function isValidResource (line 1) | function isValidResource(resource: string) {
function getCssResource (line 15) | function getCssResource(line: string): string | undefined {
function getJsResource (line 34) | function getJsResource(line: string): string | undefined {
function getExternalResources (line 53) | function getExternalResources(html: string) {
function getBodyContent (line 65) | function getBodyContent(html: string): string | undefined {
function parseHTML (line 81) | function parseHTML(html: string) {
FILE: packages/import-utils/src/create-sandbox/index.ts
type IDependencies (line 13) | interface IDependencies {
function getHTMLInfo (line 17) | function getHTMLInfo(html: IModule | undefined) {
function findMainFile (line 27) | function findMainFile(
constant CLOUD_TEMPLATES (line 48) | const CLOUD_TEMPLATES = [
function isCloudTemplate (line 71) | function isCloudTemplate(template: ITemplate): boolean {
function getSandboxMetadata (line 75) | function getSandboxMetadata(directory: INormalizedModules): {
function getEnvironmentVariables (line 114) | function getEnvironmentVariables(directory: INormalizedModules) {
function createSandbox (line 132) | async function createSandbox(
FILE: packages/import-utils/src/create-sandbox/templates.ts
function getMainFile (line 3) | function getMainFile(template: ITemplate) {
constant SANDBOX_CONFIG (line 41) | const SANDBOX_CONFIG = "sandbox.config.json";
constant TEMPLATE_CONFIG (line 42) | const TEMPLATE_CONFIG = ".codesandbox/template.json";
constant MAX_CLIENT_DEPENDENCY_COUNT (line 43) | const MAX_CLIENT_DEPENDENCY_COUNT = 50;
type Dependencies (line 45) | type Dependencies = { [name: string]: string };
type PackageJSON (line 46) | type PackageJSON = {
function getTemplate (line 50) | function getTemplate(
FILE: packages/import-utils/src/create-sandbox/utils/extract-requires.ts
constant ECMA_VERSION (line 11) | const ECMA_VERSION = 2017;
function exportRequires (line 24) | function exportRequires(code: string) {
FILE: packages/import-utils/src/create-sandbox/utils/resolve.ts
function getDirectoryPaths (line 3) | function getDirectoryPaths(directories: ISandboxDirectory[]) {
FILE: packages/import-utils/src/is-text.ts
constant FILE_LOADER_REGEX (line 5) | const FILE_LOADER_REGEX =
constant MAX_FILE_SIZE (line 7) | const MAX_FILE_SIZE = 3 * 1024 * 1024;
FILE: packages/import-utils/src/utils/files/denormalize.ts
function generateSandboxFile (line 13) | function generateSandboxFile(
function createDirectoryRecursively (line 35) | function createDirectoryRecursively(
function generateSandboxDirectory (line 61) | function generateSandboxDirectory(
function denormalize (line 72) | function denormalize(
FILE: packages/import-utils/src/utils/files/normalize.ts
function findSandboxFiles (line 9) | function findSandboxFiles(
function normalizeSandboxFiles (line 44) | function normalizeSandboxFiles(
FILE: packages/types/index.d.ts
type IModule (line 1) | interface IModule {
type IBinaryModule (line 9) | interface IBinaryModule extends IModule {
type IDirectory (line 13) | interface IDirectory {
type INormalizedModules (line 17) | interface INormalizedModules {
type ISandboxFile (line 21) | interface ISandboxFile {
type ISandboxDirectory (line 32) | interface ISandboxDirectory {
type ITemplate (line 38) | type ITemplate =
type ISandbox (line 67) | interface ISandbox {
Condensed preview — 87 files, each entry showing its path, character count, and a content snippet. Download the .json file, or copy it, to get the full structured content (179K characters).
[
{
"path": ".codesandbox/tasks.json",
"chars": 1023,
"preview": "{\n // These tasks will run in order when initializing your CodeSandbox project.\n \"setupTasks\": [\n {\n \"name\": \""
},
{
"path": ".dockerignore",
"chars": 154,
"preview": ".git\n.gitignore\nREADME.md\ndocker-compose.yml\nnode_modules\nDockerfile\ndist\n# Ignore generated credentials from google-git"
},
{
"path": ".eslintrc.js",
"chars": 73,
"preview": "module.exports = {\n extends: 'airbnb',\n plugins: ['react', 'jest'],\n};\n"
},
{
"path": ".github/workflows/build-image.yml",
"chars": 956,
"preview": "name: ci\n\non:\n push:\n branches:\n - \"master\"\n\njobs:\n docker:\n runs-on: ubuntu-latest\n\n permissions:\n "
},
{
"path": ".gitignore",
"chars": 102,
"preview": ".DS_Store\n\nnode_modules\nnpm-debug.log\nyarn-error.log\nbuild\npublic\ntemp\njest\n.aws\ndist\nlerna-debug.log\n"
},
{
"path": ".prettierrc",
"chars": 3,
"preview": "{}\n"
},
{
"path": "Dockerfile",
"chars": 144,
"preview": "FROM node:19-alpine as build\nWORKDIR /app\nCOPY . .\n\nRUN yarn\n\nRUN yarn build\n\nUSER node\n\nCMD [\"node\", \"./packages/git-ex"
},
{
"path": "LICENSE",
"chars": 179,
"preview": "Copyright (c) Ives van Hoorne\n\nUp is an Open Source project licensed under the terms of\nthe GPLv3 license. Please see <h"
},
{
"path": "catalog-info.yaml",
"chars": 1006,
"preview": "apiVersion: backstage.io/v1alpha1\nkind: Component\nmetadata:\n name: github-importer\n description: All importers & expor"
},
{
"path": "lerna.json",
"chars": 98,
"preview": "{\n \"lerna\": \"2.4.0\",\n \"packages\": [\"packages/*\"],\n \"version\": \"2.2.3\",\n \"npmClient\": \"yarn\"\n}\n"
},
{
"path": "package.json",
"chars": 875,
"preview": "{\n \"name\": \"codesandbox-importers\",\n \"version\": \"1.0.0\",\n \"description\": \"\",\n \"private\": true,\n \"scripts\": {\n \"b"
},
{
"path": "packages/cli/.gitignore",
"chars": 4,
"preview": "lib\n"
},
{
"path": "packages/cli/LICENSE",
"chars": 179,
"preview": "Copyright (c) Ives van Hoorne\n\nUp is an Open Source project licensed under the terms of\nthe GPLv3 license. Please see <h"
},
{
"path": "packages/cli/README.md",
"chars": 928,
"preview": "# codesandbox-cli\n\n> Upload your templates to codesandbox with a single command 🏖️\n\n[ {\n co"
},
{
"path": "packages/cli/src/utils/env.ts",
"chars": 78,
"preview": "export const IS_STAGING = process.env.CODESANDBOX_NODE_ENV === \"development\";\n"
},
{
"path": "packages/cli/src/utils/log.ts",
"chars": 1177,
"preview": "import chalk from \"chalk\";\n\nexport function log(text = \"\") {\n console.log(`> ${text}`);\n}\n\nexport function logCodeSandb"
},
{
"path": "packages/cli/src/utils/parse-sandbox/file-error.ts",
"chars": 469,
"preview": "export default class FileError extends Error {\n public path: string;\n public isBinary: boolean;\n\n /**\n * Creates an"
},
{
"path": "packages/cli/src/utils/parse-sandbox/index.ts",
"chars": 2966,
"preview": "import * as fs from \"fs-extra\";\nimport * as path from \"path\";\nimport { isText, isTooBig } from \"codesandbox-import-utils"
},
{
"path": "packages/cli/src/utils/parse-sandbox/upload-files.ts",
"chars": 545,
"preview": "import { IUploads } from \".\";\nimport { createUpload } from \"../api\";\nimport { INormalizedModules } from \"codesandbox-imp"
},
{
"path": "packages/cli/src/utils/url.ts",
"chars": 647,
"preview": "import { IS_STAGING } from \"./env\";\n\nexport const BASE_URL = IS_STAGING\n ? \"https://codesandbox.stream\"\n : \"https://co"
},
{
"path": "packages/cli/tsconfig.json",
"chars": 4678,
"preview": "{\n \"compilerOptions\": {\n /* Basic Options */\n \"target\": \"es3\" /* Specify ECMAScript target version: 'ES3' (defaul"
},
{
"path": "packages/cli/tslint.json",
"chars": 319,
"preview": "{\n \"defaultSeverity\": \"error\",\n \"extends\": [\"tslint:latest\", \"tslint-config-prettier\"],\n \"jsRules\": {},\n \"rules\": {\n"
},
{
"path": "packages/cli/typings/extensions/json.d.ts",
"chars": 111,
"preview": "declare module \"*.json\" {\n const package: {\n name: string;\n version: string;\n };\n export = package;\n}\n"
},
{
"path": "packages/git-extractor/.gitignore",
"chars": 23,
"preview": "config/production.json\n"
},
{
"path": "packages/git-extractor/config/.gitkeep",
"chars": 0,
"preview": ""
},
{
"path": "packages/git-extractor/package.json",
"chars": 2754,
"preview": "{\n \"name\": \"git-converter\",\n \"version\": \"2.2.3\",\n \"description\": \"\",\n \"main\": \"index.js\",\n \"private\": true,\n \"scri"
},
{
"path": "packages/git-extractor/src/index.ts",
"chars": 2596,
"preview": "import * as Sentry from \"@sentry/node\";\nimport * as Koa from \"koa\";\nimport * as bodyParser from \"koa-bodyparser\";\nimport"
},
{
"path": "packages/git-extractor/src/middleware/appsignal.ts",
"chars": 877,
"preview": "import { Context } from \"koa\";\nimport { appsignal } from \"../utils/appsignal\";\n\nconst appSignal = async (ctx: Context, n"
},
{
"path": "packages/git-extractor/src/middleware/camelize.ts",
"chars": 534,
"preview": "import { Context } from \"koa\";\nimport { camelizeKeys } from \"humps\";\n\nconst camelizeMiddleware = async (ctx: Context, ne"
},
{
"path": "packages/git-extractor/src/middleware/decamelize.ts",
"chars": 262,
"preview": "import { Context } from \"koa\";\nimport { decamelizeKeys } from \"humps\";\n\nconst decamelizeMiddleware = async (ctx: Context"
},
{
"path": "packages/git-extractor/src/middleware/error-handler.ts",
"chars": 526,
"preview": "import { Context } from \"koa\";\n\n// Error handler\nconst errorHandler = async (ctx: Context, next: () => Promise<any>) => "
},
{
"path": "packages/git-extractor/src/middleware/logger.ts",
"chars": 706,
"preview": "import { Middleware, Context } from \"koa\";\n\ninterface ILogParams {\n method: string;\n url: string;\n duration: number;\n"
},
{
"path": "packages/git-extractor/src/middleware/not-found.ts",
"chars": 244,
"preview": "import { Context } from \"koa\";\n\n// Not found handler\nconst notFound = async (ctx: Context, next: () => Promise<any>) => "
},
{
"path": "packages/git-extractor/src/routes/define.test.ts",
"chars": 906,
"preview": "import { createSandboxFromDefine } from \"./define\";\n\nit(\"can infer title and description\", async () => {\n const payload"
},
{
"path": "packages/git-extractor/src/routes/define.ts",
"chars": 1516,
"preview": "import { Context } from \"koa\";\nimport createSandbox from \"codesandbox-import-utils/lib/create-sandbox\";\nimport {\n INorm"
},
{
"path": "packages/git-extractor/src/routes/github/api.ts",
"chars": 23566,
"preview": "import * as Sentry from \"@sentry/node\";\nimport axios, { AxiosPromise, AxiosRequestConfig } from \"axios\";\nimport * as zip"
},
{
"path": "packages/git-extractor/src/routes/github/index.ts",
"chars": 8974,
"preview": "import * as Sentry from \"@sentry/node\";\nimport { IModule, INormalizedModules } from \"codesandbox-import-util-types\";\nimp"
},
{
"path": "packages/git-extractor/src/routes/github/pull/download.ts",
"chars": 3332,
"preview": "import * as JSZip from \"jszip\";\n\nimport { isText } from \"codesandbox-import-utils/lib/is-text\";\nimport { INormalizedModu"
},
{
"path": "packages/git-extractor/src/routes/github/push/index.ts",
"chars": 4838,
"preview": "import {\n IBinaryModule,\n IModule,\n INormalizedModules,\n} from \"codesandbox-import-util-types\";\n\nimport delay from \"."
},
{
"path": "packages/git-extractor/src/routes/github/push/utils/__tests__/delta.test.ts",
"chars": 2949,
"preview": "import { INormalizedModules } from \"codesandbox-import-util-types\";\nimport getDelta from \"../delta\";\n\ndescribe(\"commit\","
},
{
"path": "packages/git-extractor/src/routes/github/push/utils/create-blobs.ts",
"chars": 1015,
"preview": "import { IModule, INormalizedModules } from \"codesandbox-import-util-types\";\nimport fetch from \"node-fetch\";\n\nimport { c"
},
{
"path": "packages/git-extractor/src/routes/github/push/utils/delta.ts",
"chars": 1138,
"preview": "import { IModule, IDirectory } from \"codesandbox-import-util-types\";\nimport { ITree } from \"../index\";\n\nimport { createH"
},
{
"path": "packages/git-extractor/src/routes/github/types.d.ts",
"chars": 411,
"preview": "export type Module = {\n name: string;\n path: string;\n sha: string;\n size: number;\n url: string;\n html_url: string;"
},
{
"path": "packages/git-extractor/src/utils/appsignal.ts",
"chars": 211,
"preview": "import { Appsignal } from \"@appsignal/nodejs\";\n\nexport const appsignal = new Appsignal({\n active: process.env.NODE_ENV "
},
{
"path": "packages/git-extractor/src/utils/delay.ts",
"chars": 140,
"preview": "export default function delay(ms: number): Promise<void> {\n return new Promise((resolve) => {\n setTimeout(() => reso"
},
{
"path": "packages/git-extractor/src/utils/env.ts",
"chars": 89,
"preview": "export default process.env.NODE_ENV === \"production\"\n ? \"production\"\n : \"development\";\n"
},
{
"path": "packages/git-extractor/src/utils/log.ts",
"chars": 217,
"preview": "import * as _debug from \"debug\";\n\nif (process.env.NODE_ENV === \"development\") {\n _debug.enable(\"cs:*\");\n}\nconst debug ="
},
{
"path": "packages/git-extractor/tsconfig.json",
"chars": 4679,
"preview": "{\n \"compilerOptions\": {\n /* Basic Options */\n \"target\": \"es6\" /* Specify ECMAScript target version: 'ES3' (defaul"
},
{
"path": "packages/hmaeo.yml",
"chars": 7,
"preview": "heahea\n"
},
{
"path": "packages/import-utils/.gitignore",
"chars": 18,
"preview": "*.js\n*.js.map\nlib\n"
},
{
"path": "packages/import-utils/LICENSE",
"chars": 7487,
"preview": " GNU LESSER GENERAL PUBLIC LICENSE\n Version 3, 29 June 2007\n\nCopyright (C) 2018 "
},
{
"path": "packages/import-utils/package.json",
"chars": 933,
"preview": "{\n \"name\": \"codesandbox-import-utils\",\n \"version\": \"2.2.3\",\n \"main\": \"lib/index.js\",\n \"files\": [\n \"lib/**\"\n ],\n "
},
{
"path": "packages/import-utils/src/api/define.ts",
"chars": 555,
"preview": "import { ITemplate } from \"codesandbox-import-util-types\";\nimport * as LZString from \"lz-string\";\n\nexport interface IFil"
},
{
"path": "packages/import-utils/src/create-sandbox/__mocks__/pacote.ts",
"chars": 67,
"preview": "export const manifest = () => {\n return { version: \"15.5.4\" };\n};\n"
},
{
"path": "packages/import-utils/src/create-sandbox/__tests__/__snapshots__/html-parser.test.ts.snap",
"chars": 2293,
"preview": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n\nexports[`html-parser can retrieve body from html 1`] = `\n{\n \"body\": \"\n "
},
{
"path": "packages/import-utils/src/create-sandbox/__tests__/html-parser.test.ts",
"chars": 3531,
"preview": "import parser from \"../html-parser\";\n\ndescribe(\"html-parser\", () => {\n it(\"can retrieve body from html\", () => {\n co"
},
{
"path": "packages/import-utils/src/create-sandbox/__tests__/templates.test.ts",
"chars": 1399,
"preview": "import { getTemplate } from \"../templates\";\n\ndescribe(\"template detection\", () => {\n it(\"detects a react template\", () "
},
{
"path": "packages/import-utils/src/create-sandbox/html-parser.ts",
"chars": 1992,
"preview": "function isValidResource(resource: string) {\n return (\n resource.startsWith(\"https://\") ||\n resource.startsWith(\""
},
{
"path": "packages/import-utils/src/create-sandbox/index.ts",
"chars": 4566,
"preview": "import {\n INormalizedModules,\n IModule,\n ISandbox,\n ITemplate,\n} from \"codesandbox-import-util-types\";\nimport denorm"
},
{
"path": "packages/import-utils/src/create-sandbox/templates.ts",
"chars": 6625,
"preview": "import { INormalizedModules, ITemplate } from \"codesandbox-import-util-types\";\n\nexport function getMainFile(template: IT"
},
{
"path": "packages/import-utils/src/create-sandbox/utils/__tests__/__snapshots__/resolve.test.ts.snap",
"chars": 317,
"preview": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n\nexports[`resolve getDirectoryPaths can resolve dir paths 1`] = `\n{\n \"/world"
},
{
"path": "packages/import-utils/src/create-sandbox/utils/__tests__/extract-requires.test.ts",
"chars": 1175,
"preview": "import extractRequires from \"../extract-requires\";\n\ndescribe(\"extractRequires\", () => {\n it(\"can find simple requires\","
},
{
"path": "packages/import-utils/src/create-sandbox/utils/__tests__/resolve.test.ts",
"chars": 502,
"preview": "import { getDirectoryPaths } from \"../resolve\";\n\ndescribe(\"resolve\", () => {\n describe(\"getDirectoryPaths\", () => {\n "
},
{
"path": "packages/import-utils/src/create-sandbox/utils/extract-requires.ts",
"chars": 1423,
"preview": "import * as acorn from \"acorn\";\nimport * as babel from \"@babel/core\";\nimport traverse from \"@babel/traverse\";\nimport { I"
},
{
"path": "packages/import-utils/src/create-sandbox/utils/resolve.ts",
"chars": 749,
"preview": "import { ISandboxDirectory } from \"codesandbox-import-util-types\";\n\nexport function getDirectoryPaths(directories: ISand"
},
{
"path": "packages/import-utils/src/index.ts",
"chars": 8,
"preview": "// stub\n"
},
{
"path": "packages/import-utils/src/is-text.ts",
"chars": 758,
"preview": "import { isText as _isText } from \"istextorbinary\";\n\nconst jsRegex = /(t|j)sx?$/i;\n\nconst FILE_LOADER_REGEX =\n /\\.(ico|"
},
{
"path": "packages/import-utils/src/utils/files/__tests__/__snapshots__/denormalize.test.ts.snap",
"chars": 2706,
"preview": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n\nexports[`denormalize can create nested directories 1`] = `\n{\n \"directories\""
},
{
"path": "packages/import-utils/src/utils/files/__tests__/denormalize.test.ts",
"chars": 3438,
"preview": "import denormalize from \"../denormalize\";\n\nlet count = 0;\n\njest.mock(\"shortid\", () => ({\n generate: () => \"\" + count++,"
},
{
"path": "packages/import-utils/src/utils/files/denormalize.ts",
"chars": 3413,
"preview": "import { dirname, basename } from \"path\";\nimport {\n INormalizedModules,\n IModule,\n ISandboxFile,\n ISandboxDirectory,"
},
{
"path": "packages/import-utils/src/utils/files/normalize.ts",
"chars": 1145,
"preview": "import { join } from \"path\";\n\nimport {\n ISandboxFile,\n ISandboxDirectory,\n INormalizedModules,\n} from \"codesandbox-im"
},
{
"path": "packages/import-utils/tsconfig.json",
"chars": 4726,
"preview": "{\n \"compilerOptions\": {\n /* Basic Options */\n \"target\": \"es5\" /* Specify ECMAScript target version: 'ES3' (defaul"
},
{
"path": "packages/types/LICENSE",
"chars": 7487,
"preview": " GNU LESSER GENERAL PUBLIC LICENSE\n Version 3, 29 June 2007\n\nCopyright (C) 2018 "
},
{
"path": "packages/types/index.d.ts",
"chars": 1505,
"preview": "export interface IModule {\n content: string; // If isBinary is true this will be a URL\n isBinary: boolean;\n type?: \"f"
},
{
"path": "packages/types/package.json",
"chars": 125,
"preview": "{\n \"name\": \"codesandbox-import-util-types\",\n \"version\": \"2.2.3\",\n \"gitHead\": \"3cdcdea389d39f2a92be73dcb73496f68c8ada4"
},
{
"path": "tsconfig.json",
"chars": 4679,
"preview": "{\n \"compilerOptions\": {\n /* Basic Options */\n \"target\": \"es6\" /* Specify ECMAScript target version: 'ES3' (defaul"
}
]
About this extraction
This page contains the full source code of the codesandbox/codesandbox-importers GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 87 files (159.8 KB, approximately 43.5k tokens) and a symbol index of 181 extracted functions, classes, methods, constants, and types. Use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.