Repository: rsaryev/auto-copilot-cli Branch: main Commit: f2a23c9b1619 Files: 39 Total size: 65.1 KB Directory structure: gitextract_i_a95qrx/ ├── .eslintrc.json ├── .github/ │ ├── FUNDING.yml │ └── workflows/ │ └── npm-publish.yml ├── .gitignore ├── .prettierrc ├── LICENSE ├── README.md ├── cli.js ├── deployment/ │ └── deploy.bash ├── docs/ │ ├── chat.md │ ├── code-chat.md │ ├── code-review.md │ ├── config.md │ ├── pre-commit.md │ ├── refactor.md │ ├── shell.md │ ├── sql-translator.md │ └── test.md ├── package.json ├── src/ │ ├── commands/ │ │ ├── chat.ts │ │ ├── code-chat-command.ts │ │ ├── code-review.ts │ │ ├── generate-tests.ts │ │ ├── pre-commit.ts │ │ ├── refactor.ts │ │ ├── shell.ts │ │ └── sql-translator.ts │ ├── config/ │ │ └── config.ts │ ├── index.ts │ ├── llm/ │ │ └── index.ts │ ├── types.ts │ └── utils/ │ ├── error.ts │ ├── git.ts │ ├── helpers.ts │ ├── index.ts │ ├── inquirer.ts │ ├── language-extensions.ts │ └── update.ts └── tsconfig.json ================================================ FILE CONTENTS ================================================ ================================================ FILE: .eslintrc.json ================================================ { "parser": "@typescript-eslint/parser", "extends": ["plugin:@typescript-eslint/recommended"], "plugins": ["@typescript-eslint"], "rules": {} } ================================================ FILE: .github/FUNDING.yml ================================================ # These are supported funding model platforms github: [] ================================================ FILE: .github/workflows/npm-publish.yml ================================================ # This workflow will run tests using node and then publish a package to GitHub Packages when a release is created # For more information see: https://docs.github.com/en/actions/publishing-packages/publishing-nodejs-packages name: Node.js Package on: release: types: [created] workflow_dispatch: # Allows you to run this workflow 
manually from the Actions tab jobs: publish-npm: runs-on: ubuntu-latest environment: name: ohmyenv url: https://github.com steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: node-version: 18 registry-url: https://registry.npmjs.org/ - run: npm ci - run: npm run build - run: npm publish env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} ================================================ FILE: .gitignore ================================================ /.idea/ /dist/ /node_modules/ /.env /auto-copilot-cli.iml /config.json ================================================ FILE: .prettierrc ================================================ { "singleQuote": true, "trailingComma": "all", "tabWidth": 2, "printWidth": 120, "semi": true } ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2023 Saryev Rustam Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
================================================ FILE: README.md ================================================ [![npm](https://img.shields.io/npm/v/auto-copilot-cli)](https://www.npmjs.com/package/auto-copilot-cli) [![Node.js Package](https://github.com/rsaryev/auto-copilot-cli/actions/workflows/npm-publish.yml/badge.svg)](https://github.com/rsaryev/auto-copilot-cli/actions/workflows/npm-publish.yml) [![MIT License](https://img.shields.io/badge/license-MIT-blue)](https://github.com/transitive-bullshit/chatgpt-api/blob/main/license) [![auto-copilot-cli npm downloads](https://img.shields.io/npm/dt/auto-copilot-cli)](https://www.npmjs.com/package/auto-copilot-cli)

chat

## Description `auto-copilot-cli` is a versatile tool that offers several functionalities, including: - AI chat help you quickly find and improve codebase and answer questions about codebase - Code review - Pre-commit for generating commit messages - Code refactoring and linting structure of a folder or a file - Test generation - Shell command generation and execution - Natural language to SQL translation ## Setup 1. Install `auto-copilot-cli` globally: ```bash # using npm npm install -g auto-copilot-cli # using install script curl -s https://raw.githubusercontent.com/rsaryev/auto-copilot-cli/main/deployment/deploy.bash | bash ``` 2. Get an API key from [OpenAI](https://platform.openai.com/account/api-keys). 3. Refer to the [CLI usage](https://github.com/rsaryev/auto-copilot-cli/tree/main/docs) guide to learn how to use the tool. ### Commands - `code-chat ` - AI chat with codebase [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/code-chat.md) - Options: - `-p, --prompt ` - Prompt for AI - `code-review` - Perform code review [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/code-review.md) - Perform code review - `test ` - Generate test [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/test.md) - Options: - `-p, --prompt ` - Prompt for AI - `-o, --output ` - Output file - `refactor ` - Refactor code [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/refactor.md) - Options: - `-p, --prompt ` - Prompt for AI - `-o, --output ` - Output file - `sql-translator ` - Translate natural language to SQL [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/sql-translator.md) - Options: - `-o, --output ` - Output sql file - `-s, --schema-path ` - Path to schema file (sql, prisma, any format) - `chat ` - Chat with AI [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/chat.md) - Options: - `-p, --prompt ` - Prompt for AI - `shell ` - Generate and execute a shell command 
[usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/shell.md) - `pre-commit` - Analyze git diff and generate a commit message [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/pre-commit.md) - Options: - `-y, --yes` - Skip confirmation - `config ` - Set configuration [usage](https://github.com/rsaryev/auto-copilot-cli/blob/main/docs/config.md) - `get-config` - Print configuration ### Options - `-h, --help` - Display help for command - `-V, --version` - Output the version number ## Contributing Contributions are always welcome! ================================================ FILE: cli.js ================================================ #!/usr/bin/env node require('./dist/index.js'); ================================================ FILE: deployment/deploy.bash ================================================ #!/bin/bash # Install Node.js version 18.16.0 if not already installed if ! node -v | grep -q "^v18\."; then echo "🚀 Node.js is not installed. Installing Node.js..." curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash source ~/.nvm/nvm.sh nvm install v18.16.0 fi if ! npm list -g | grep -q "auto-copilot-cli"; then echo "🚀 Node.js is installed. Installing auto-copilot-cli..." npm install -g auto-copilot-cli fi echo "🚀 auto-copilot-cli is installed. Running copilot --help" ================================================ FILE: docs/chat.md ================================================ # Chat with AI

chat

## Description Chat with AI ## Usage ```bash # Chat with AI $ copilot chat "How are you?" # Chat with AI with prompt $ copilot chat "How many types in typescript are there?" -p "Software Engineering" ``` ================================================ FILE: docs/code-chat.md ================================================ # AI chat with codebase

chat

## Description In the chat, you can ask questions about the codebase. AI will answer your questions, and if necessary, it will offer code improvements. This is very convenient when you want to quickly find something in the codebase, but don't want to waste time searching. It is also convenient when you want to improve a specific function, you can ask "How can I improve the function {function name}?" and AI will suggest improvements. Codebase is analyzed using openai. ## Usage code-chat works only with files of popular programming languages and additionally with .txt files. All other files will be ignored. ```bash $ copilot code-chat ./src ``` ================================================ FILE: docs/code-review.md ================================================ # Code review

code-review

## Description Perform code review and suggest improvements ## Usage Need to be in a git repository If you want some files not to be checked, then add to .gitignore ```bash # Code review $ copilot code-review ``` ================================================ FILE: docs/config.md ================================================ # Set Config ## Description Set configuration ## Usage ```bash # Set openai api key $ copilot config OPENAI_API_KEY # Set openai base url Default: https://api.openai.com/v1 $ copilot config OPEN_AI_BASE_URL # Set openai model Default: gpt-3.5-turbo $ copilot config MODEL # Set config commit with description Default: no copilot config INCLUDE_COMMIT_DESCRIPTION yes # Set config commit without description Default: no copilot config INCLUDE_COMMIT_DESCRIPTION no # Set config package manager Default: brew # For determine which package manager to recommend in generated shell scripts copilot config PACKAGE_MANAGER brew ``` Get Config ```bash # Print config $ copilot get-config ``` ================================================ FILE: docs/pre-commit.md ================================================ # Pre-commit

Pre-commit

## Description Analyze git diff and generate a commit message ## Usage Need to be in a git repository If you want some files not to be checked, then add to .gitignore ```bash # Analyzes git diff and generates a commit message $ copilot pre-commit # Analyzes git diff and generates a commit message with skip confirmation $ copilot pre-commit -y ``` ================================================ FILE: docs/refactor.md ================================================ # Refactor code

refactor

## Description Refactor code ## Usage ```bash # Refactor code $ copilot refactor ./server.js # Refactor code with prompt $ copilot refactor ./server.js -p "use typescript" # Refactor code with prompt and output $ copilot refactor ./server.js -p "use typescript" -o ./server.ts ``` ================================================ FILE: docs/shell.md ================================================ # Generate and execute a shell command

shell

## Description Generate and execute a shell command ## Usage ```bash # Convert all mov files to gif $ copilot shell "convert all mov files to gif" # Rename all files in the current directory to lowercase $ copilot shell "rename files in the current directory to lowercase" # Convert all images in the current directory to size 100x100 $ copilot shell "convert all images in the current directory to size 100x100" # Create a file with implementation of binary search $ copilot shell "create a js file with implementation of binary search" # Create a simple web server in Node.js using Koajs $ copilot shell "create a simple web server in Node.js using Koajs" # Start PostgreSQL in Docker $ copilot shell "start PostgreSQL in Docker" ``` ================================================ FILE: docs/sql-translator.md ================================================ # SQL Translator

sql-translator

## Description Translate natural language to SQL ## Usage ```bash # Translate natural language to SQL $ copilot sql-translator "get all last posts of users" # Translate natural language to SQL with output $ copilot sql-translator "get all last posts of users" # Translate natural language to SQL with output and sql $ copilot sql-translator "get all last posts of users" -s ./schema.sql # Translate natural language to SQL with output and prisma schema $ copilot sql-translator "get all last posts of users" -s ./schema.prisma ``` ================================================ FILE: docs/test.md ================================================ # Generate test

test

## Description Generate test ## Usage ```bash # Generate test $ copilot test ./server.js # Generate test with prompt $ copilot test ./server.js -p "use jest framework" # Generate test with prompt and output $ copilot test ./server.js -p "use jest framework" -o ./server.test.js ``` ================================================ FILE: package.json ================================================ { "name": "auto-copilot-cli", "version": "1.1.2", "main": "index.js", "description": "This CLI tool uses the ChatGPT language model to create commands. This allows you to create a list of tasks and perform them sequentially, optimizing your workflow and increasing the efficiency of repetitive actions.", "scripts": { "build": "tsc", "lint": "eslint . --ext .ts", "lint:fix": "eslint . --ext .ts --fix", "pretty": "prettier --write ." }, "bin": { "auto-copilot-cli": "cli.js", "copilot": "cli.js" }, "files": [ "dist", "cli.js", "README.md", "config.json", "demo" ], "author": "Rustam Saryev", "license": "MIT", "devDependencies": { "@types/cli-table": "^0.3.1", "@types/inquirer": "^9.0.3", "@types/node": "^18.16.3", "@typescript-eslint/eslint-plugin": "^5.59.2", "@typescript-eslint/parser": "^5.59.2", "eslint": "^8.39.0", "eslint-plugin-import": "^2.27.5", "prettier": "^2.8.8", "ts-node": "^10.9.1", "typescript": "^5.0.4" }, "dependencies": { "@dqbd/tiktoken": "^1.0.7", "ai-validator": "^1.0.76", "ajv": "^8.12.0", "axios": "^1.4.0", "chalk": "^4.1.2", "cli-table": "^0.3.11", "commander": "^10.0.1", "directory-tree": "^3.5.1", "hnswlib-node": "^1.4.2", "inquirer": "^8.2.5", "langchain": "^0.0.94", "openai": "^3.2.1", "ora": "^5.4.1", "semver": "^7.5.0", "simple-git": "^3.18.0", "typeorm": "^0.3.15", "winston": "^3.8.2" }, "repository": { "type": "git", "url": "git+https://github.com/rsaryev/auto-copilot-cli.git" }, "keywords": [ "copilot", "cli", "chatgpt", "openai" ], "bugs": { "url": "https://github.com/rsaryev/auto-copilot-cli/issues" }, "homepage": 
"https://github.com/rsaryev/auto-copilot-cli#readme" } ================================================ FILE: src/commands/chat.ts ================================================ import {Command} from '../types'; import {LLMChat} from '../llm'; import * as readline from 'readline'; import {inputAsk} from '../utils'; export class ChatCommand extends Command { async execute( message: string, options: { prompt?: string; }, ): Promise { const {prompt} = options; let input = ''; while (input !== 'exit') { input = await inputAsk(); await LLMChat.chat({ config: this.config, input, prompt, handleLLMStart: () => { readline.cursorTo(process.stdout, 0); process.stdout.write('🤖 '); }, handleLLMEnd: () => { process.stdout.write('\n'); }, handleLLMError: () => { process.stdout.write('\n'); }, handleLLMNewToken: (token: string) => { process.stdout.write(token); }, }); } } } ================================================ FILE: src/commands/code-chat-command.ts ================================================ import {Command} from '../types'; import {LLMCodeChat} from '../llm'; import * as readline from 'readline'; export class CodeChatCommand extends Command { async execute( directory: string, options: { prompt?: string; }, ): Promise { await LLMCodeChat.chat({ config: this.config, directory, input: '', prompt: options.prompt, handleLLMStart: () => { readline.cursorTo(process.stdout, 0); process.stdout.write('🤖 '); }, handleLLMEnd: () => { process.stdout.write('\n'); }, handleLLMError: () => { process.stdout.write('\n'); }, handleLLMNewToken: (token: string) => { process.stdout.write(token); }, }); } } ================================================ FILE: src/commands/code-review.ts ================================================ import {Command} from '../types'; import {LLMCodeReview} from '../llm'; import chalk from 'chalk'; import path from 'path'; import fs from 'fs'; import {gitDiffFiles} from '../utils/git'; export class CodeReviewCommand extends Command { async 
execute(message: string, options: { yes?: string }): Promise { const diffFiles = await gitDiffFiles(); if (diffFiles.length === 0) { console.log(`${chalk.red('✘')} No files to review, use git add to add files to review`); return; } console.log(`${chalk.green('✔')} Found ${diffFiles.length} files to review`); const logPath = path.resolve(process.cwd(), 'review.log'); const writeStream = fs.createWriteStream(logPath, {flags: 'a'}); console.log(`${chalk.green('✔')} ${chalk.yellow('Writing review log to')} ${logPath}\n`); for (const file of diffFiles) { console.log(`${chalk.green('✔')} ${chalk.yellow('Reviewing')} ${file}`); const filePath = path.resolve(process.cwd(), file); const content = fs.readFileSync(filePath, 'utf-8'); if (content === '') { console.log(`${chalk.red('✘')} ${chalk.yellow('Skip empty file')}`); continue; } if (content.length > 10000) { console.log(`${chalk.red('✘')} ${chalk.yellow('Skip large file')}`); continue; } await LLMCodeReview.codeReview({ config: this.config, content, filePath, handleLLMStart: async () => { process.stdout.write('\n'); }, handleLLMEnd: async () => { process.stdout.write('\n'); }, handleLLMError: async () => { process.stdout.write('\n'); }, handleLLMNewToken: async (token: string) => { process.stdout.write(token); writeStream.write(token); }, }); } } } ================================================ FILE: src/commands/generate-tests.ts ================================================ import {Command} from '../types'; import fs from 'fs'; import chalk from 'chalk'; import {askOpenEditor, askTest, inputTest} from '../utils'; import {exec} from 'child_process'; import ora from 'ora'; import {LLMCode} from '../llm'; export class TestCommand extends Command { async execute( path: string, { prompt, output, }: { prompt?: string; output?: string; }, ): Promise { if (!fs.existsSync(path)) { console.error(`${chalk.red('✘')} no such file or directory: ${path}`); return; } const fileType = path.split('.').pop(); if (!fileType) { 
console.error(`${chalk.red('✘')} invalid file type: ${path}`); return; } output = output || path.replace(`.${fileType}`, `.test.${fileType}`); const questionOpenCode = await askOpenEditor(); if (questionOpenCode) { exec(`${this.config.EDITOR || 'code'} ${output}`); } const spinner = ora('Generating tests'); const content = fs.readFileSync(path, 'utf-8'); const handleLLMStart = () => spinner.start(); const handleLLMEnd = () => spinner.succeed('Successfully generated tests'); const handleLLMError = () => spinner.fail(); await LLMCode.generateTest({ config: this.config, content, prompt: prompt, output: output, handleLLMStart, handleLLMEnd, handleLLMError, }); const answer = await askTest(); if (answer) { prompt = await inputTest(); await this.execute(output, { prompt, output, }); } } } ================================================ FILE: src/commands/pre-commit.ts ================================================ import {Command} from '../types'; import {LLMPreCommit} from '../llm'; import {exec} from 'child_process'; import {promisify} from 'util'; import {askCommit, askRetryCommit} from '../utils'; import ora from 'ora'; import {gitDiffCommand} from '../utils/git'; export class PreCommitCommand extends Command { async execute( message: string, options: { yes?: string; }, ): Promise { const spinner = ora('Analyzing').start(); try { const {config} = this; const diff = await gitDiffCommand(); if (!diff) { spinner.succeed('No diff found using git add'); return; } const {title, messages} = await LLMPreCommit.preCommit({config, diff}); spinner.stop(); const commitBullets = messages?.map((message) => `- ${message}`).join('\n') ?? ''; const fullCommitMessage = `"${title}${commitBullets ? `\n\n${commitBullets}` : ''}"`; const shouldCommit = options.yes ? 
true : await askCommit(fullCommitMessage); if (shouldCommit) { spinner.text = 'Committing'; await promisify(exec)(`git commit -m ${fullCommitMessage}`); spinner.succeed('Successfully committed'); } else { const shouldRetry = await askRetryCommit(); if (shouldRetry) await this.execute(message, options); } } catch (error) { spinner.fail('Failed to commit'); throw error; } } } ================================================ FILE: src/commands/refactor.ts ================================================ import {Command} from '../types'; import fs from 'fs'; import chalk from 'chalk'; import {askOpenEditor, askRetryRefactor, inputRefactor} from '../utils'; import {exec} from 'child_process'; import ora from 'ora'; import {LLMCode} from '../llm'; export class RefactorCommand extends Command { public async execute( filePath: string, { prompt, output, }: { prompt?: string; output?: string; }, ): Promise { if (!fs.existsSync(filePath)) { console.error(`${chalk.red('✘')} no such file or directory: ${filePath}`); return; } const fileType = filePath.split('.').pop(); if (!fileType) { console.error(`${chalk.red('✘')} invalid file type: ${filePath}`); return; } output = output || filePath.replace(`.${fileType}`, `.refactored.${fileType}`); const questionOpenCode = await askOpenEditor(); if (questionOpenCode) { exec(`${this.config.EDITOR || 'code'} ${output}`); } const spinner = ora('Refactoring'); const content = fs.readFileSync(filePath, 'utf-8'); const handleLLMStart = () => spinner.start(); const handleLLMEnd = () => spinner.succeed('Successfully refactored'); const handleLLMError = () => spinner.fail(); await LLMCode.refactor({ config: this.config, content, prompt: prompt, output: output, handleLLMStart, handleLLMEnd, handleLLMError, }); const answer = await askRetryRefactor(); if (answer) { const input = await inputRefactor(); await this.execute(output, { prompt: input, output, }); } } } ================================================ FILE: src/commands/shell.ts 
================================================ import {Command} from '../types'; import path from 'path'; import fs from 'fs'; import chalk from 'chalk'; import {randomUUID} from 'crypto'; import {executeCommand, executeShell, exFunction} from '../utils/helpers'; import {askExecute, askOpenEditor} from '../utils'; import {LLMGenerateShell} from '../llm'; import os from 'os'; export class ShellCommand extends Command { async execute(goal: string): Promise { const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'auto_copilot_cli')); const pathToSaveShellScript = path.join(tempDir, `./${randomUUID()}.sh`); const shellScript = await exFunction( LLMGenerateShell.generateShell.bind(null, this.config, goal), 'Pending', 'Done', ); fs.writeFileSync(pathToSaveShellScript, shellScript.shellScript); console.log(`${shellScript.isDangerous ? chalk.red('✘') : chalk.green('✔')} Safe | ${shellScript.description}`); const questionOpenScript = await askOpenEditor(); if (questionOpenScript) { const command = `${this.config.EDITOR || 'code'} ${pathToSaveShellScript}`; await executeCommand(command); } const isApproved = await askExecute(); if (isApproved) { const shellScriptModified = fs.readFileSync(pathToSaveShellScript, 'utf-8'); await executeShell(shellScriptModified.toString()); } } } ================================================ FILE: src/commands/sql-translator.ts ================================================ import {Command} from '../types'; import fs from 'fs'; import chalk from 'chalk'; import {askOpenEditor, askRetryRefactor, inputRefactor} from '../utils'; import {exec} from 'child_process'; import ora from 'ora'; import {LLMCode} from '../llm'; export class SqlTranslatorCommand extends Command { public async execute( query: string, { output, schemaPath, }: { output?: string; schemaPath?: string; }, ): Promise { if (schemaPath && !fs.existsSync(schemaPath)) { console.error(`${chalk.red('✘')} no such file or directory: ${schemaPath}`); return; } output = output || 
'output.sql'; const questionOpenCode = await askOpenEditor(); if (questionOpenCode) { exec(`${this.config.EDITOR || 'code'} ${output}`); } const spinner = ora('Translating'); const schema = schemaPath ? fs.readFileSync(schemaPath, 'utf-8') : ''; const handleLLMStart = () => spinner.start(); const handleLLMEnd = () => spinner.succeed('Successfully translated'); const handleLLMError = () => spinner.fail(); await LLMCode.translateSql({ config: this.config, content: schema, prompt: query, output, handleLLMStart, handleLLMEnd, handleLLMError, }); const answer = await askRetryRefactor(); if (answer) { const input = await inputRefactor(); await this.execute(input, { schemaPath, output, }); } } } ================================================ FILE: src/config/config.ts ================================================ import * as fs from 'fs'; import * as path from 'path'; import {IConfig} from '../types'; const configPath = path.join(__dirname, '../../config.json'); const defaultConfig: IConfig = { OPENAI_API_KEY: 'sk-xxx', TEMPERATURE: 0, MODEL: 'gpt-3.5-turbo-0613', EDITOR: 'code', OPEN_AI_BASE_URL: 'https://api.openai.com/v1', INCLUDE_COMMIT_DESCRIPTION: 'no', PACKAGE_MANAGER: 'brew', }; export function setConfig(config: IConfig): void { fs.writeFileSync(configPath, JSON.stringify(config, null, 2), 'utf8'); } // Set a specific configuration property by key export function setConfigByKey(key: K, value: IConfig[K]): void { const config = getConfig(); config[key] = value; setConfig(config); } export function getConfig(): IConfig { if (!fs.existsSync(configPath)) { setConfig(defaultConfig); } const existingConfig = JSON.parse(fs.readFileSync(configPath, 'utf8')); if (!existingConfig.OPENAI_API_KEY) { existingConfig.OPENAI_API_KEY = defaultConfig.OPENAI_API_KEY; setConfig(existingConfig); } return {...defaultConfig, ...existingConfig}; } ================================================ FILE: src/index.ts ================================================ import { Command } 
from 'commander'; // @ts-ignore import { version } from '../package.json'; import { getConfig, setConfig, setConfigByKey } from './config/config'; import { TestCommand } from './commands/generate-tests'; import { RefactorCommand } from './commands/refactor'; import { ChatCommand } from './commands/chat'; import { ShellCommand } from './commands/shell'; import { IConfig } from './types'; import axios, { AxiosError } from 'axios'; import { askOpenAIKey } from './utils'; import chalk from 'chalk'; import { PreCommitCommand } from './commands/pre-commit'; import { checkNodeVersion } from './utils/helpers'; import { checkUpdate } from './utils/update'; import { SqlTranslatorCommand } from './commands/sql-translator'; import { CodeReviewCommand } from './commands/code-review'; import { checkGitExists } from './utils/git'; import { CodeChatCommand } from './commands/code-chat-command'; const program: Command = new Command() .name('auto-copilot-cli') .description('Auto Copilot CLI') .version(version) .alias('copilot'); type IOption = { name: string; description: string; required: boolean; }; type ICommand = { name: string; description: string; args: string; options: IOption[]; action: (...args: any[]) => Promise; }; const testCommand: ICommand = { name: 'test', description: 'Generate test', args: '', options: [ { name: '-p, --prompt ', description: 'Prompt for AI', required: false, }, { name: '-o, --output ', description: 'Output file', required: false, }, ], action: async (file: string, options: { prompt?: string; output?: string }): Promise => { const config: IConfig = getConfig(); const testCommand: TestCommand = new TestCommand(config); await testCommand.execute(file, options); }, }; const refactorCommand: ICommand = { name: 'refactor', description: 'Refactor code', args: '', options: [ { name: '-p, --prompt ', description: 'Prompt for AI', required: false, }, { name: '-o, --output ', description: 'Output file', required: false, }, ], action: async (file: string, 
options: { prompt?: string; output?: string }): Promise => { const config: IConfig = getConfig(); const refactorCommand: RefactorCommand = new RefactorCommand(config); await refactorCommand.execute(file, options); }, }; const sqlTranslatorCommand: ICommand = { name: 'sql-translator', description: 'Translate natural language to SQL', args: '', options: [ { name: '-o, --output ', description: 'Output sql file', required: false, }, { name: '-s, --schema-path ', description: 'Path to schema file (sql, prisma, any format)', required: false, }, ], action: async (query: string, options: { schemaPath?: string; output?: string }): Promise => { const config: IConfig = getConfig(); const sqlCommand: SqlTranslatorCommand = new SqlTranslatorCommand(config); await sqlCommand.execute(query, options); }, }; const chatCommand: ICommand = { name: 'chat', description: 'Chat with AI', args: '', options: [ { name: '-p, --prompt ', description: 'Prompt for AI', required: false, }, ], action: async (message: string, options: { prompt?: string }): Promise => { const config: IConfig = getConfig(); const chatCommand: ChatCommand = new ChatCommand(config); await chatCommand.execute(message, options); }, }; const shellCommand: ICommand = { name: 'shell', description: 'Generate and execute a shell command', args: '', options: [], action: async (goal: string): Promise => { const config: IConfig = getConfig(); const shellCommand: ShellCommand = new ShellCommand(config); await shellCommand.execute(goal); }, }; const configCommand: ICommand = { name: 'config', description: 'Set config', args: ' ', options: [], action: async (key: keyof IConfig, value: string): Promise => setConfigByKey(key, value), }; const getConfigCommand: ICommand = { name: 'get-config', description: 'Print config', args: '', options: [], action: async (): Promise => { const config: any = getConfig(); console.table(Object.keys(config).map((key: string) => ({ key, value: config[key] }))); }, }; const preCommitCommand: ICommand = 
{ name: 'pre-commit', description: 'Pre commit hook', args: '', options: [ { name: '-y, --yes', description: 'Skip confirmation', required: false, }, ], action: async (options: { yes?: string }): Promise => { await checkGitExists(); const config: IConfig = getConfig(); const preCommitCommand: PreCommitCommand = new PreCommitCommand(config); await preCommitCommand.execute('', options); }, }; const codeReviewCommand: ICommand = { name: 'code-review', description: 'Code review', args: '', options: [ { name: '-y, --yes', description: 'Skip confirmation', required: false, }, ], action: async (options: { yes?: string }): Promise => { await checkGitExists(); const config: IConfig = getConfig(); const codeReviewCommand = new CodeReviewCommand(config); await codeReviewCommand.execute('', options); }, }; const codeChatCommand: ICommand = { name: 'code-chat', description: 'Chat with AI about code', args: '', options: [ { name: '-p, --prompt ', description: 'Prompt for AI', required: false, }, ], action: async (directory: string, options: { prompt?: string }): Promise => { const config: IConfig = getConfig(); const codeChatCommand: CodeChatCommand = new CodeChatCommand(config); await codeChatCommand.execute(directory, options); }, }; const commands: ICommand[] = [ testCommand, refactorCommand, chatCommand, shellCommand, configCommand, getConfigCommand, preCommitCommand, sqlTranslatorCommand, codeReviewCommand, codeChatCommand, ]; async function main() { checkNodeVersion(); await checkUpdate(); commands.forEach(({ name, description, args, options, action }) => { const command: Command = new Command(name).description(description); if (args) { command.arguments(args); } options.forEach(({ name, description, required }) => { command.option(name, description, required); }); const handler = async (...args: any[]): Promise => { const config: IConfig = getConfig(); try { await action(...args); } catch (error: any) { if (axios.isAxiosError(error)) { if ((error as 
AxiosError).response?.status === 401) { config.OPENAI_API_KEY = await askOpenAIKey(); setConfig(config); return handler(...args); } else if ((error as AxiosError).response?.status === 429) { console.log(`${chalk.red('✘')} ${chalk.yellow('You have reached your OpenAI API usage limit')}`); return; } else if ((error as AxiosError).response?.status === 500) { console.log(`${chalk.red('✘')} ${chalk.yellow('OpenAI API is down')}`); return; } } console.log(`${chalk.red('✘')} ${error.response?.data?.error?.message || error.message}`); } }; command.action(handler); program.addCommand(command); }); program.parse(process.argv); } main(); ================================================ FILE: src/llm/index.ts ================================================ import {PromptTemplate} from 'langchain/prompts'; import {z} from 'zod'; import * as os from 'os'; import {IChatParams, IConfig, IRefactorParams, ShellScriptResponse} from '../types'; import {OpenAI, OpenAIChat} from 'langchain/llms/openai'; import fs from 'fs'; import {throwLLMParseError} from '../utils/error'; import {ChatCompletionRequestMessage} from 'openai'; import path from 'path'; import {AiValidator} from 'ai-validator'; import {calculateCost, getPackageManagerByOs} from '../utils/helpers'; import {OpenAIEmbeddings} from 'langchain/embeddings/openai'; import {RecursiveCharacterTextSplitter} from 'langchain/text_splitter'; import {TextLoader} from 'langchain/document_loaders/fs/text'; import {customAsk, inputAsk} from '../utils'; import ora from 'ora'; import {DirectoryLoader} from 'langchain/document_loaders/fs/directory'; import {extensionsList} from '../utils/language-extensions'; import * as process from 'process'; import {HNSWLib} from 'langchain/vectorstores/hnswlib'; export class LLMCommand { protected llm: OpenAI; protected config: IConfig; constructor(config: IConfig, maxTokens: number, streaming: boolean, temperature = 0) { this.config = config; this.llm = new OpenAI( { modelName: config.MODEL, maxTokens, 
temperature, openAIApiKey: config.OPENAI_API_KEY, streaming, }, { basePath: config.OPEN_AI_BASE_URL, }, ); } } export class LLMGenerateShell extends LLMCommand { constructor(config: IConfig) { super(config, 1024, false); } static async generateShell(config: IConfig, prompt: string): Promise { return new LLMGenerateShell(config).generateShell(prompt); } async generateShell(prompt: string): Promise { const packageManager = this.config.PACKAGE_MANAGER || getPackageManagerByOs(); const schema = z.object({ shellScript: z.string().describe(`shell script with comments`), isDangerous: z.boolean().describe(`if the shell is very dangerous, it will be marked as dangerous`), description: z.string().describe(`short description`), }); const validator = AiValidator.input` Goal: Write the best shell script based on the prompt: \`${prompt}\` Constraints for the shell script: - should be compatible with the ${os.platform()}. - Should work without user intervention and should not require keyboard input. - Every step should be printed to the console so that the user can understand what is happening. - Check the installed packages and install the missing packages if necessary. 
- If you need to create a file use operator "Here Document" (<<) to create a multiline string: \`\`\` cat << EOF > file.txt {{content}} EOF \`\`\` - Use package manager ${packageManager} Recommendations: - Use best practices - Use the best tools for the job - Use the best practices for writing shell scripts ${schema} The current time and date is ${new Date().toLocaleString()} The current working directory is ${process.cwd()} The current os platform is ${os.platform()} `; const response = await this.llm.call(validator.prompt()); try { return validator.parse(response); } catch (error) { return throwLLMParseError(); } } } export class LLMCode extends LLMCommand { constructor(config: IConfig) { super(config, 2056, true); } static async refactor( params: IRefactorParams & { config: IConfig; }, ): Promise { return new LLMCode(params.config).refactor(params); } static async generateTest( params: IRefactorParams & { config: IConfig; }, ): Promise { return new LLMCode(params.config).generateTest(params); } static async translateSql( params: IRefactorParams & { config: IConfig; }, ): Promise { return new LLMCode(params.config).translateSql(params); } async translateSql(params: IRefactorParams): Promise { const promptTemplate = new PromptTemplate({ template: ` Goal: Based on the following prompt translate the natural language to sql. Constraints: - The sql should be formatted according to the standard for that sql language. Recommendations: - Use the best practices for writing sql. Output format: - Should be only sql, otherwise the answer will be rejected. 
The prompt: {prompt} The schema: {schema} `, inputVariables: ['output', 'prompt', 'schema'], }); const writeStream = fs.createWriteStream(params.output); const input = await promptTemplate.format({ prompt: params.prompt, output: params.output, schema: params.content.trim(), }); await this.llm.call(input, undefined, [ { handleLLMStart: params.handleLLMStart, handleLLMNewToken(token: string) { writeStream.write(token); }, handleLLMEnd() { params.handleLLMEnd(); writeStream.end(); }, handleLLMError(e): Promise | void { params.handleLLMError(e); writeStream.end(); }, }, ]); } async generateTest({ content, output, prompt, handleLLMStart, handleLLMEnd, handleLLMError, }: IRefactorParams): Promise { const promptTemplate = new PromptTemplate({ template: ` Goal: Generate tests for the following code as much as possible. Constraints: - The code should be formatted according to the standard for that programming language. Recommendations: - Use the best testing framework for the programming language. Output format: - Should be only tests code, otherwise the answer will be rejected. ${prompt ? 
`Prompt for generating tests: \`\`\`${prompt}\`\`\`` : ''} The content: {content} `, inputVariables: ['content', 'date', 'output'], }); const input = await promptTemplate.format({ content, date: new Date().toISOString(), prompt, output, }); const writeStream = fs.createWriteStream(output); await this.llm.call(input, undefined, [ { handleLLMStart, handleLLMNewToken(token: string) { writeStream.write(token); }, handleLLMEnd() { handleLLMEnd(); writeStream.end(); }, handleLLMError(e): Promise | void { handleLLMError(e); writeStream.end(); }, }, ]); } async refactor({ content, output, prompt, handleLLMStart, handleLLMEnd, handleLLMError, }: IRefactorParams): Promise { const promptTemplate = new PromptTemplate({ template: `Refactor and fix the following content Constraints: - If this is code in a programming language, it must be formatted according to the standard for that programming language and run without errors. Recommendations: - Use best practices for the content. Answer format: - Return only refactored valid content, otherwise the answer will be rejected. ${prompt ? 
`Prompt for refactoring: \`\`\`${prompt}\`\`\`` : ''} The content: {content} `, inputVariables: ['content', 'date'], }); const input = await promptTemplate.format({ content, date: new Date().toISOString(), prompt, }); const writeStream = fs.createWriteStream(output); await this.llm.call(input, undefined, [ { handleLLMStart, handleLLMNewToken(token: string) { writeStream.write(token); }, handleLLMEnd() { handleLLMEnd(); writeStream.end(); }, handleLLMError(e): Promise | void { handleLLMError(e); writeStream.end(); }, }, ]); } } export class LLMChat extends LLMCommand { static messages: ChatCompletionRequestMessage[] = []; private llmChat: OpenAIChat; constructor(config: IConfig) { super(config, 1024, false); this.llmChat = new OpenAIChat({ prefixMessages: LLMChat.messages, modelName: config.MODEL, maxTokens: 256, temperature: 0, openAIApiKey: config.OPENAI_API_KEY, streaming: true, }); } static async chat( params: IChatParams & { config: IConfig; }, ): Promise { return new LLMChat(params.config).chat(params); } async chat({ input, prompt, handleLLMNewToken, handleLLMStart, handleLLMEnd, handleLLMError, }: IChatParams): Promise { const messages = LLMChat.messages; if (input === '') { LLMChat.messages = []; handleLLMStart(); handleLLMNewToken('Chat history cleared'); return handleLLMEnd(); } if (messages.length === 0) { messages.push({ role: 'system', content: prompt || 'You are a helpful assistant that answers in language understandable to humans.', }); } const answer = await this.llmChat.call(input, undefined, [ { handleLLMNewToken, handleLLMStart, handleLLMEnd, handleLLMError, }, ]); messages.push({ role: 'user', content: input, }); messages.push({ role: 'assistant', content: answer, }); } } export class LLMPreCommit extends LLMCommand { constructor(config: IConfig) { super(config, 256, false, 0.7); } static async preCommit(params: { config: IConfig; diff: string }): Promise<{ title: string; messages?: string[]; }> { return new 
LLMPreCommit(params.config).preCommit(params.diff); } async preCommit(diff: string): Promise<{ title: string; messages?: string[]; }> { const schemaWithMessages = z.object({ title: z.string().describe('The title of short description of the changes'), messages: z.array(z.string()).describe('paragraphs describing the changes'), }); const schemaWithOptionalMessages = z.object({ title: z.string().describe('The title of the commit'), }); const schema = this.config.INCLUDE_COMMIT_DESCRIPTION === 'yes' ? schemaWithMessages : schemaWithOptionalMessages; const validator = AiValidator.input`You are reviewing the git diff and writing a git commit. Constraints: - Use format Conventional Commits. ${schema} The git diff: \`\`\`${diff}\`\`\` `; const response = await this.llm.call(validator.prompt()); try { return await validator.parse(response.replace(/\\n/g, '\n')); } catch (error) { return throwLLMParseError(); } } } export class LLMCodeReview extends LLMCommand { constructor(config: IConfig) { super(config, 256, true, 0); } static async codeReview(params: { config: IConfig; content: string; filePath: string; handleLLMNewToken: (token: string) => Promise; handleLLMStart: () => Promise; handleLLMEnd: () => Promise; handleLLMError: (error: Error) => Promise; }): Promise { return new LLMCodeReview(params.config).codeReview({ content: params.content, filePath: params.filePath, handleLLMNewToken: params.handleLLMNewToken, handleLLMStart: params.handleLLMStart, handleLLMEnd: params.handleLLMEnd, handleLLMError: params.handleLLMError, }); } async codeReview(params: { content: string; filePath: string; handleLLMNewToken: (token: string) => Promise; handleLLMStart: () => Promise; handleLLMEnd: () => Promise; handleLLMError: (error: Error) => Promise; }): Promise { const fullFilePath = path.resolve(process.cwd(), params.filePath); const promptTemplate = new PromptTemplate({ template: `You are an automatic assistant who helps with Code Review. 
The goal is to improve the quality of the code and ensure the effective operation of the application in terms of security, scalability, and ease of maintenance. During the analysis, you should pay attention to the use of best programming practices, code optimization, security, and compliance with coding standards. Constraints: - Always specify where exactly in the code "In ${fullFilePath}:line:column" and what exactly "Need to fix like this". - Do not suggest fixes that do not improve the code or fix errors. - Be concise and accurate. Answer only valid, otherwise the answer will be rejected. \`\`\` 🤖 ${fullFilePath}:{{line}}:{{column}} 💡 {{suggestion}} \`\`\`, \`\`\`{code}\`\`\``, inputVariables: ['code'], }); const codeWithLineNumbers = params.content .split('\n') .map((line, index) => `/*${index + 1}*/ ${line}`) .join('\n') .trim(); const input = await promptTemplate.format({ code: codeWithLineNumbers, }); const response = await this.llm.call(input, undefined, [ { handleLLMNewToken: params.handleLLMNewToken, handleLLMStart: params.handleLLMStart, handleLLMEnd: params.handleLLMEnd, handleLLMError: params.handleLLMError, }, ]); return response; } } export class LLMCodeChat extends LLMCommand { private vectorStore: any; constructor(config: IConfig) { super(config, 1024, true); } static async chat({ config, directory, ...params }: IChatParams & { config: IConfig; directory: string }): Promise { const llmCodeChat = new LLMCodeChat(config); await llmCodeChat.getOrCreateVectorStore(directory); return llmCodeChat.chat(params); } async chat(params: IChatParams): Promise { const messages: ChatCompletionRequestMessage[] = [ { role: 'system', content: params.prompt || `You are given from the vector store the most relevant code that you can use to solve the user request. 
Try to answer user questions briefly and clearly.`, }, ]; while (true) { const input = await inputAsk(); const relevantCode = await this.vectorStore.asRetriever(4).getRelevantDocuments(input); if (relevantCode.length === 0) { console.log("🤖 Sorry, I don't found any code for your question."); return this.chat(params); } const llmChat = new OpenAIChat({ prefixMessages: messages.concat({ role: 'user', content: relevantCode .map((doc) => doc.pageContent) .join('\n') .replace(/\n/g, ' ') .trim(), }), modelName: this.config.MODEL, temperature: 0, openAIApiKey: this.config.OPENAI_API_KEY, streaming: true, }); await llmChat.call(input, undefined, [ { handleLLMNewToken: params.handleLLMNewToken, handleLLMStart: params.handleLLMStart, handleLLMEnd: params.handleLLMEnd, handleLLMError: params.handleLLMError, }, ]); relevantCode.forEach((doc) => { console.log(`📄 ${doc.metadata.source}:`); }); } } private async getOrCreateVectorStore(directory: string): Promise { const vectorStorePath = path.resolve(directory, 'vector-store'); if (fs.existsSync(vectorStorePath)) { const store = await HNSWLib.load( vectorStorePath, new OpenAIEmbeddings({ openAIApiKey: this.config.OPENAI_API_KEY, }), ); const input = await customAsk(`Found existing vector store. Do you want to use it? (y/n) `); if (input) { this.vectorStore = store; return; } } const loader = new DirectoryLoader( directory, extensionsList.reduce((acc, ext) => { acc[ext] = (path) => new TextLoader(path); return acc; }, {}), ); const rawDocs = await loader.load(); const textSplitter = new RecursiveCharacterTextSplitter({chunkSize: 500, chunkOverlap: 50}); const docs = await textSplitter.splitDocuments(rawDocs); const cost = await calculateCost( this.config.MODEL, docs.map((doc) => doc.pageContent), ); const input = await customAsk( `🤖 Creating a vector store for ${rawDocs.length} documents will cost ~$${cost.toFixed( 5, )}. Do you want to continue? 
(y/n) `, ); if (!input) { console.log('🤖 Bye!'); process.exit(0); } const spinner = ora('Loading vector store...').start(); this.vectorStore = await HNSWLib.fromDocuments( docs, new OpenAIEmbeddings({ openAIApiKey: this.config.OPENAI_API_KEY, }), ); await this.vectorStore.save(vectorStorePath); spinner.succeed(`Created vector store with ${rawDocs.length} documents`); } } ================================================ FILE: src/types.ts ================================================ export interface IConfig { OPENAI_API_KEY: string; TEMPERATURE: number; MODEL: string; EDITOR: string; OPEN_AI_BASE_URL: string; INCLUDE_COMMIT_DESCRIPTION: string; PACKAGE_MANAGER: string; } export interface ShellScriptResponse { shellScript: string; isDangerous: boolean; description: string; } export interface IRefactorParams { content: string; output: string; prompt?: string; handleLLMStart: () => void; handleLLMEnd: () => void; handleLLMError: (e: any) => void; } export interface IChatParams { input: string; prompt?: string; handleLLMNewToken: (token: string) => void; handleLLMStart: () => void; handleLLMEnd: () => void; handleLLMError: (e: Error) => void; } export abstract class Command { protected readonly config: IConfig; constructor(config: IConfig) { this.config = config; } abstract execute(args: string, options: Record): Promise; } export interface IAnalyseParams { errorOutput: string; command: string; handleLLMNewToken: (token: string) => void; handleLLMStart?: () => void; handleLLMEnd?: () => void; handleLLMError?: (e: Error) => void; } ================================================ FILE: src/utils/error.ts ================================================ class BaseError extends Error { constructor(message: string) { super(message); } } export class LLMError extends BaseError { constructor(message: string) { super(message); } } export const throwLLMParseError = (): never => { throw new LLMError('Failed to parse the response from the LLM'); }; 
================================================ FILE: src/utils/git.ts ================================================ import simpleGit from 'simple-git'; import chalk from 'chalk'; import { excludePackagesFiles, extensions } from './language-extensions'; const git = simpleGit(); export async function checkGitExists() { const gitExists = await git.checkIsRepo(); if (!gitExists) { console.error(`${chalk.red('✘')} need to be in a git repository`); process.exit(1); } } export async function getGitIgnoreFiles() { const gitIgnoreFiles = await git.raw(['ls-files', '--others', '--exclude-standard', '-i', '--directory', '--cached']); return gitIgnoreFiles.split('\n').filter(Boolean); } export async function gitDiffCommand() { const exts = Array.from(extensions.values(), (ext) => `*${ext}`); const excludeFiles = Array.from(excludePackagesFiles.values()); const gitIgnoreFiles = await getGitIgnoreFiles(); return git.diff([ '--cached', '--diff-filter=ACMRT', '--', ...exts, ...excludeFiles.map((file) => `:(exclude)${file}`), ...gitIgnoreFiles.map((file) => `:(exclude)${file}`), ]); } export async function gitDiffFiles() { const exts = Array.from(extensions.values(), (ext) => `*${ext}`); const excludeFiles = Array.from(excludePackagesFiles.values()); const gitIgnoreFiles = await getGitIgnoreFiles(); const raw = await git.diff([ '--name-only', '--cached', '--diff-filter=ACMRT', '--', ...exts, ...excludeFiles.map((file) => `:(exclude)${file}`), ...gitIgnoreFiles.map((file) => `:(exclude)${file}`), ]); return raw.split('\n').filter(Boolean); } export async function gitFiles(): Promise { const exts = Array.from(extensions.values(), (ext) => `*${ext}`); const excludeFiles = Array.from(excludePackagesFiles.values()); const gitIgnoreFiles = await getGitIgnoreFiles(); const raw = await git.raw([ 'ls-files', '--cached', '--others', '--exclude-standard', '--', ...exts, ...excludeFiles.map((file) => `:(exclude)${file}`), ...gitIgnoreFiles.map((file) => `:(exclude)${file}`), ]); return 
raw.split('\n').filter(Boolean); } ================================================ FILE: src/utils/helpers.ts ================================================ import ora from 'ora'; import {exec, spawn} from 'child_process'; import chalk from 'chalk'; import {Tiktoken} from '@dqbd/tiktoken/lite'; import {load} from '@dqbd/tiktoken/load'; import registry from '@dqbd/tiktoken/registry.json'; import models from '@dqbd/tiktoken/model_to_encoding.json'; export const exFunction = async (fn: () => Promise, message: string, successMessage: string): Promise => { const spinner = ora(message).start(); try { const result = await fn(); spinner.succeed(successMessage); return result; } catch (error) { spinner.fail(); throw error; } }; export function executeCommand(command: string) { return new Promise((resolve) => { const child = exec(command); child.stdout?.on('data', (data) => { process.stdout.write(data); }); child.stderr?.on('data', (data) => { process.stderr.write(data); }); child.on('close', (code) => { resolve(code); }); }); } export function executeShell(command: string) { return new Promise((resolve) => { const child = spawn(command, [], {shell: true}); child.stdout.pipe(process.stdout); child.stderr.pipe(process.stderr); child.stdin.pipe(process.stdin); process.stdin.pipe(child.stdin); child.on('close', (code) => { resolve(code); }); }); } export function checkNodeVersion() { const nodeVersion = process.versions.node.split('.')[0]; if (Number(nodeVersion) < 18) { console.log(`${chalk.red('✘')} Please update your node version to 18 or above\nCurrent version: ${nodeVersion}`); process.exit(1); } } export function getPackageManagerByOs() { const os = process.platform; const packageManager: Record = { linux: 'apt-get', darwin: 'brew', win32: 'choco', }; return packageManager[os] || 'apt-get'; } export async function calculateCost(modelName: string, docs: string[]) { const spinner = ora('Calculating cost').start(); try { const {bpe_ranks, special_tokens, pat_str} = await 
load(registry[models[modelName]]); const encoder = new Tiktoken(bpe_ranks, special_tokens, pat_str); const tokenCount = encoder.encode(JSON.stringify(docs)).length; const cost = (tokenCount / 1000) * 0.0005; encoder.free(); return cost; } finally { spinner.stop(); } } ================================================ FILE: src/utils/index.ts ================================================ export * from './inquirer'; ================================================ FILE: src/utils/inquirer.ts ================================================ import inquirer from 'inquirer'; import chalk from 'chalk'; export const askExecute = async (): Promise => { const {execute} = await inquirer.prompt<{ execute: 'Yes' | 'No' }>([ { type: 'list', name: 'execute', message: `🚀 Execute?`, choices: ['Yes', 'No'], }, ]); return execute === 'Yes'; }; export const askOpenEditor = async (): Promise => { const {openEditor} = await inquirer.prompt<{ openEditor: 'Yes' | 'No' }>([ { type: 'list', name: 'openEditor', message: `💻 Open in editor?`, choices: ['Yes', 'No'], }, ]); return openEditor === 'Yes'; }; export const askGoal = async (): Promise => { const {goal} = await inquirer.prompt<{ goal: string }>([ { type: 'input', name: 'goal', message: '🎯 Input your goal:', }, ]); return goal; }; export const askOpenAIKey = async (): Promise => { const {openAIKey} = await inquirer.prompt<{ openAIKey: string }>([ { type: 'input', name: 'openAIKey', message: '🔑 Enter your OpenAI API key. 
You can get your API key from https://beta.openai.com/account/api-keys:', }, ]); return openAIKey; }; export const askRetryRefactor = async (): Promise => { const {refactor} = await inquirer.prompt<{ refactor: 'Yes' | 'No' }>([ { type: 'list', name: 'refactor', message: `🔁 Retry refactor?`, choices: ['Yes', 'No'], }, ]); return refactor === 'Yes'; }; export const inputRefactor = async (): Promise => { const {refactor} = await inquirer.prompt<{ refactor: string }>([ { type: 'input', name: 'refactor', message: '🎯 Input your refactor plan:', }, ]); return refactor; }; export const askTest = async (): Promise => { const {test} = await inquirer.prompt<{ test: 'Yes' | 'No' }>([ { type: 'list', name: 'test', message: `🔁 Retry generate tests?`, choices: ['Yes', 'No'], }, ]); return test === 'Yes'; }; export const inputTest = async (): Promise => { const {input} = await inquirer.prompt<{ input: string }>([ { type: 'input', name: 'input', message: '🎯 Input your test plan:', }, ]); return input; }; export const customAsk = async (message: string): Promise => { const {ask} = await inquirer.prompt<{ ask: 'Yes' | 'No' }>([ { type: 'list', name: 'ask', message, choices: ['Yes', 'No'], }, ]); return ask === 'Yes'; }; export const inputAsk = async (): Promise => { const {ask} = await inquirer.prompt<{ ask: string }>([ { type: 'input', name: 'ask', message: '👉', }, ]); return ask; }; export const askCommit = async (commit: string): Promise => { const {ask} = await inquirer.prompt<{ ask: 'Yes' | 'No' }>([ { type: 'list', name: 'ask', message: `Do you want to commit with the following message? 
${chalk.green(commit)} `, choices: ['Yes', 'No'], }, ]); return ask === 'Yes'; }; export const askRetryCommit = async (): Promise => { const {ask} = await inquirer.prompt<{ ask: 'Yes' | 'No' }>([ { type: 'list', name: 'ask', message: `Do you want to retry generating commit message?`, choices: ['Yes', 'No'], }, ]); return ask === 'Yes'; }; ================================================ FILE: src/utils/language-extensions.ts ================================================ import path from 'path'; export const programmingLanguageExtensions = { Text: ['.txt'], JavaScript: ['.js', '.mjs'], TypeScript: ['.ts', '.tsx'], CSS: ['.css', '.scss', '.less'], HTML: ['.html', '.htm'], JSON: ['.json'], Python: ['.py'], Java: ['.java'], C: ['.c'], 'C++': ['.cpp'], 'C#': ['.cs'], Go: ['.go'], PHP: ['.php'], Ruby: ['.rb'], Rust: ['.rs'], Swift: ['.swift'], Kotlin: ['.kt'], Scala: ['.scala'], 'Objective-C': ['.m', '.h'], Shell: ['.sh'], Perl: ['.pl', '.pm'], Lua: ['.lua'], SQL: ['.sql'], }; export const excludePackagesFilesList = { JavaScript: ['package-lock.json', 'yarn.lock'], Python: ['requirements.txt'], Java: ['pom.xml'], Go: ['go.mod'], PHP: ['composer.json'], Ruby: ['Gemfile'], Rust: ['Cargo.toml'], Swift: ['Package.swift'], Kotlin: ['build.gradle'], Scala: ['build.sbt'], 'Objective-C': ['Podfile'], Shell: ['package.json'], Perl: ['cpanfile'], Lua: ['rockspec'], }; export const extensionsList = Object.values(programmingLanguageExtensions).flat(); export const extensions = new Set(extensionsList); export const excludePackagesFiles = new Set(Object.values(excludePackagesFilesList).flat()); export const filterFilesByExtensions = (files: string[]): string[] => { return files.filter((file) => extensions.has(path.extname(file))); }; ================================================ FILE: src/utils/update.ts ================================================ import ora from 'ora'; import {exec} from 'child_process'; import semver from 'semver'; import chalk from 'chalk'; // @ts-ignore 
import {name, version} from '../../package.json'; import {promisify} from 'util'; export async function checkUpdate() { const spinner = ora('Checking for updates').start(); const execPromise = promisify(exec); const {stdout} = await execPromise('npm view auto-copilot-cli version'); const latestVersion = semver.clean(stdout); if (!latestVersion) { spinner.fail(chalk.yellow('Could not check for updates')); return; } if (semver.gt(latestVersion, version)) { spinner.fail( chalk.yellow(`Please update ${name} to the latest version: ${chalk.blue('npm i -g auto-copilot-cli')}`), ); } else { spinner.stop(); } } ================================================ FILE: tsconfig.json ================================================ { "compilerOptions": { "target": "es2021", "module": "commonjs", "outDir": "./dist", "rootDir": "./src", "baseUrl": "./src", "strict": true, "esModuleInterop": true, "resolveJsonModule": true, "noImplicitAny": false, "skipLibCheck": true }, "include": ["src/**/*"], "exclude": ["node_modules", "**/*.spec.ts"] }