[
  {
    "path": ".github/workflows/main.yml",
    "content": "name: Build\nrun-name: Building changes\non: [push]\njobs:\n  main:\n    runs-on: ubuntu-latest\n    services:\n      postgres:\n        image: postgres:alpine\n        env:\n          POSTGRES_USER: postgres\n          POSTGRES_PASSWORD: password\n          POSTGRES_DB: schemats\n        # Set health checks to wait until postgres has started\n        options: >-\n          --health-cmd pg_isready\n          --health-interval 10s\n          --health-timeout 5s\n          --health-retries 5\n        ports:\n          # Maps tcp port 5432 on service container to the host\n          - 5432:5432\n    steps:\n      - name: Check out repository code\n        uses: actions/checkout@v4\n      - run: yarn install\n      - run: yarn run build\n      - run: yarn run example:postgres\n"
  },
  {
    "path": ".github/workflows/publish.yml",
    "content": "name: Publish Package to npmjs\non:\n  release:\n    types: [published]\njobs:\n  build:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v4\n      # Setup .npmrc file to publish to npm\n      - uses: actions/setup-node@v4\n        with:\n          node-version: '20.x'\n          registry-url: 'https://registry.npmjs.org'\n      - run: yarn install\n      - run: yarn run build\n      - run: yarn publish\n        env:\n          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}"
  },
  {
    "path": ".gitignore",
    "content": "# Build\n.build\n\n# Logs\nlogs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\nlerna-debug.log*\n.pnpm-debug.log*\n\n# Diagnostic reports (https://nodejs.org/api/report.html)\nreport.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json\n\n# Runtime data\npids\n*.pid\n*.seed\n*.pid.lock\n\n# Directory for instrumented libs generated by jscoverage/JSCover\nlib-cov\n\n# Coverage directory used by tools like istanbul\ncoverage\n*.lcov\n\n# nyc test coverage\n.nyc_output\n\n# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)\n.grunt\n\n# Bower dependency directory (https://bower.io/)\nbower_components\n\n# node-waf configuration\n.lock-wscript\n\n# Compiled binary addons (https://nodejs.org/api/addons.html)\nbuild/Release\n\n# Dependency directories\nnode_modules/\njspm_packages/\n\n# Snowpack dependency directory (https://snowpack.dev/)\nweb_modules/\n\n# TypeScript cache\n*.tsbuildinfo\n\n# Optional npm cache directory\n.npm\n\n# Optional eslint cache\n.eslintcache\n\n# Microbundle cache\n.rpt2_cache/\n.rts2_cache_cjs/\n.rts2_cache_es/\n.rts2_cache_umd/\n\n# Optional REPL history\n.node_repl_history\n\n# Output of 'npm pack'\n*.tgz\n\n# Yarn Integrity file\n.yarn-integrity\n\n# dotenv environment variables file\n.env\n.env.test\n.env.production\n\n# parcel-bundler cache (https://parceljs.org/)\n.cache\n.parcel-cache\n\n# Next.js build output\n.next\nout\n\n# Nuxt.js build / generate output\n.nuxt\ndist\n\n# Gatsby files\n.cache/\n# Comment in the public line in if your project uses Gatsby and not Next.js\n# https://nextjs.org/blog/next-9-1#public-directory-support\n# public\n\n# vuepress build output\n.vuepress/dist\n\n# Serverless directories\n.serverless/\n\n# FuseBox cache\n.fusebox/\n\n# DynamoDB Local files\n.dynamodb/\n\n# TernJS port file\n.tern-port\n\n# Stores VSCode versions used for testing VSCode extensions\n.vscode-test\n\n# yarn 
v2\n.yarn/cache\n.yarn/unplugged\n.yarn/build-state.yml\n.yarn/install-state.gz\n.pnp.*"
  },
  {
    "path": ".npmignore",
    "content": ".npmrc\nnode_modules\n.circleci"
  },
  {
    "path": ".yarnrc.yml",
    "content": "nodeLinker: node-modules\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "## [1.0.8] - 2022.03.20\n\nmisc: updating packages\nci: switching to github actions\n\n## [1.0.7] - 2022.11.27\n\nfeat: add --no-bigint #17 (wirekang)\nfeat: add --no-optional #18 (wirekang)\nfix: --no-write-header to --no-header (wirekang)\nmisc: updating packages\n\n## [1.0.6] - 2022.08.29\n\nchore: upgrading dependencies\n\n## [1.0.5] - 2022.06.26\n\nfeat: infer bigint type \nchore: upgrading dependencies\n\n## [1.0.4] - 2022.02.22\n\nfix(mysql): for some versions of mysql key casing results in empty hash lookups\nfeat(postgres): Adding types 'mol', 'bfp' and 'bit' \n\n## [1.0.3] - 2022.02.22\n\nchore: adding ci\n\n## [1.0.2] - 2022.02.19\n\nchore: Upgrading dependencies\n\n## [1.0.1] - 2022.02.03\n\nfix(postgres): (bchrobot) adding missing cli command throwOnMissingType\n\nfix(postgres): (bchrobot) typo in write-header option\n\n## [0.0.12] - 2021.11.01\n\nfix: typo in CLI\n\n## [0.0.11] - 2021.10.31\n\nfeat: adding mysql compatability\n\nThis allows you to do the same thing just with mysql using `/bin/schemats mysql $connection_string -s $schema_name `\n\n## [0.0.10] - 2021.09.02\n\nchore: updating all dependencies\n\nfeat: add -C --camelCaseTypes option\n    \n    This option adds the ability to camel case just the type names - which\n    gives a good mix between using JS Standard Camel Case and still\n    following the actual definitions of database.\n    \n    The issue with using camel case for both the types and the keys\n    is that we would have to provide a layer within the programs using the\n    types to convert back to the original form if the attributes are\n    different in JS than in the schema.\n    \n    There are definately\n    issues with this, especially with a database schema with an inconsistent\n    naming convention - we would have to provide some sort of mapping file\n    to acheive correct conversion.\n    \n    The types on the other hand, only exist in JS and therefore can be named\n    whatever we want when 
generating the types.\n\nfix(schema): add 'tsvector' to string types\n    \n    Text Search Vectors are a complex type inside of postgres, but can\n    generally be expressed as strings within TS.\n\nfix(generator): quote string enum keys\n    \n    This helps prevent issues in the generated file due to special\n    characters like `:` present in the postgres enum keys.\n\n## [0.0.9] - 2021.08.27\n\ndoc: adding example documentation\nfix: Don't export custom types if empty\n\n## [0.0.8] - 2021.08.22\n\nFeat: Exporting tables and Custom types for typed-postgres\n\n## [0.0.7] - 2021.08.05\n\nFix: array regression due to bad merge\n\n## [0.0.6] - 2021.08.05\n\nFeat: using the -f flag to reference a file with non DB types and adding comments to columns in postgres using `COMMENT ON COLUMN schema.table.column is '@type {TYPE}';` now allows us to type jsonb columns directly\n\n## [0.0.5] - 2021.07.26\n\nFix: isArray overrides real value with false\n\n## [0.0.4] - 2021.07.26\n\nFix: publish dist and src packages\n\n## [0.0.3] - 2021.07.26\n\nFix: nullable fields are also optional\n\n## [0.0.2] - 2021.07.26\n\nFix: Adding support for arrays\n\n## [0.0.1] - 2021.06.20\n\nInclude README file in published package\n\n## [0.0.0] - 2021.06.20\n\nFirst release\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2021 Vlandor Ltd\nCopyright (c) 2016 SweetIQ\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "# Schemats\n\nBefore anything, I would like to give a massive thank you to [sweetiq](https://www.npmjs.com/package/schemats) and their contributors for giving me a huge head start.\n\nThe reason I have created a new repo instead of a fork is because I don't support mysql and have some breaking changes due to how this library is consumed by [postgres-typed](https://github.com/vramework/postgres-typed) and [vramework](https://vramework.io/).\n\nI have kept the name and based off their MIT license as means of attribution and thanks.\n\n## Why Schemats\n\nBecause being able to make a change to your database structure and have it:\n\n- validate through your node backend APIs\n- get verified against automatically generate JSON schemas\n- raise errors in your frontend application \n\nIs just a great developer experience in my opinion.\n\nThis allows us to some pretty amazing things when it comes to refactoring and maintaining codebases, and\nalso provide the meta-data to help with libraries like [postgres-typed](https://github.com/vramework/postgres-typed). 
\n\n## Quickstart\n\n### Installing\n\n```bash\nyarn add -d @vramework/schemats || npm install -d @vramework/schemats\n```\n\n### Generating the type definition from schema\n\nAssuming you have the following schema (this is a bit of a random one):\n\n```sql\nCREATE SCHEMA \"pet_store\";\n\nCREATE TYPE \"pet_store\".\"animal\" AS enum (\n  'cat',\n  'dog'\n);\n\nCREATE TABLE \"pet_store\".\"user\" (\n  \"uuid\" uuid PRIMARY KEY default gen_random_uuid(),\n  \"name\" text NOT NULL\n);\n\nCREATE TABLE \"pet_store\".\"pet\" (\n  \"uuid\" uuid PRIMARY KEY default gen_random_uuid(),\n  \"owner\" uuid REFERENCES \"pet_store\".\"user\",\n  \"type\" pet_store.animal NOT NULL,\n  \"name\" text NOT NULL,\n  \"birthdate\" date,\n  \"last_seen_location\" point,\n  \"random_facts\" jsonb,\n  \"pet_search_document\" tsvector\n);\nCOMMENT ON COLUMN pet_store.pet.random_facts is '@type {RandomPetFacts}';\n```\n\nYou can now generate a bunch of different schema definitions.\n\nMy personal favourite is the following:\n\n```bash\nschemats postgres postgres://postgres@localhost/database -f ./db-custom-types.ts -s pet_store -c -e -o db-types.ts\n```\n\nWhile will result in the following typescript file: \n\n```typescript\n\n/**\n * AUTO-GENERATED FILE @ Fri, 27 Aug 2021 08:26:50 GMT - DO NOT EDIT!\n *\n * This file was automatically generated by schemats v.0.0.8\n * $ schemats generate postgres://username:password@localhost:5432/schemats -C -s pet_store\n *\n */\n\nimport { RandomPetFacts } from './db-custom-types'\n\nexport enum Animal {\n\t'Cat' = 'cat',\n\t'Dog' = 'dog' \n}\n\nexport interface User { \n\tuuid: string\n\tname: string \n}\n\nexport interface Pet { \n\tuuid: string\n\towner?: string | null\n\ttype: Animal\n\tname: string\n\tbirthdate?: Date | null\n\tlastSeenLocation?: { x: number, y: number } | null\n\trandomFacts?: RandomPetFacts | null\n\tmoreRandomFacts?: unknown | null \n\tpetSearchDocument?: string | null\n}\n\nexport interface Tables {\n    user: User,\n\tpet: 
Pet\n}\n\nexport type CustomTypes = RandomPetFacts\n```\n\nBut you have quite a bit of flexbility:\n\n```bash\nUsage: schemats mysql [options] [connection]\n\nGenerate a typescript schema from mysql\n\nArguments:\n  connection                   The connection string to use, if left empty will use env variables\n\nOptions:\n  -s, --schema <schema>        the schema to use (default: \"public\")\n  -t, --tables <tables...>     the tables within the schema\n  -f, --typesFile <typesFile>  the file where jsonb types can be imported from\n  -c, --camelCase              use camel case for enums, table names, and column names\n  -C, --camelCaseTypes         use camel case only for TS names - not modifying the column names\n  -e, --enums                  use enums instead of types\n  -o, --output <output>        where to save the generated file relative to the current working directory\n  --no-header                  don't generate a header\n  -h, --help                   display help for command\n```\n\n```bash\nGenerate a typescript schema from mysql\n\nArguments:\n  connection                   The connection string to use, if left empty will use env variables\n\nOptions:\n  -s, --schema <schema>        the schema to use (default: \"public\")\n  -t, --tables <tables...>     the tables within the schema\n  -f, --typesFile <typesFile>  the file where jsonb types can be imported from\n  -c, --camelCase              use camel case for enums, table names, and column names\n  -C, --camelCaseTypes         use camel case only for TS names - not modifying the column names\n  -e, --enums                  use enums instead of types\n  -o, --output <output>        where to save the generated file relative to the current working directory\n  --no-header                  don't generate a header\n  -h, --help                   display help for command\n```\n\n## Features\n\n### Camel Case `-c --camelCase, -C --camelCaseTypes`\n\nThis automatically turns all your tables and Enums / Types 
and column names to camelcase, which is the default\nexperience for javascript and is more consistent to use\n\nYou can use Camel Case Types to just camel case the TS entities - leaving the strings representing \nthe SQL columns alone.\n\n### Enums `-e --enums`\n\nUsing enums turns all postgres enums into Enums instead of normal types, which is just a\npreference aspect for developers since renaming enum values or order will change the Enum\nkey and value.\n\n### Types File `-f --typesFile <typesFile>`\n\nThis is a VERY useful feature for jsonb fields. Normally a jsonb field type is unknown, \nhowever if you provide a types json file this will get the type out of the comment \nof a field and assign it to the value.\n\nThe structure of a custom type file could either be from another file:\n\n```typescript\nexport type { RandomPetFacts }  from './somewhere-else'\n```\n\nor it could just be defined straight in the file.\n\n```typescript\nexport type RandomPetFacts = Record<string, string>\n```\n\n### Tables | Custom Types `-t --tables <tables...>`\n\nThese types are automatically generated to power typed-postgres\n\n## Using in typescript\n\nYou can import all your interfaces / enums from the file:\n\n```typescript\nimport * as DB from './db-types'\n\n// And then you can start picking how you want your APIs to be used:\ntype updatePetLocation = Pick<DB.Pet, 'lastSeenAt'>\n```\n\n## Tests\n\nSo where are the tests? The original schemats library has an amazing 100% coverage and this one has 0.\n\nTo be honest, I'm using this library in a few of my current projects and any error in it throws dozens \nin the entire codebase, so it sort of tests itself. 
That being said I will be looking to add some in again,\nbut in terms of priorties not my highest.\n\nHowever for manual testing and experimenting you can easily replicate this project by:\n\n```bash\n# Clone the repo\ngit clone git@github.com:vramework/schemats.git\n# Enter repo\ncd schemats\n# Install dependencies\nyarn install\n# Run the example, which will run create the schemats library and generate the db-types library\nyarn run example:postgres\n```\n\n"
  },
  {
    "path": "bin/schemats-mysql.ts",
    "content": "import * as commander from 'commander'\nimport { Config, typescriptOfSchema } from '../src/generator'\nimport { MysqlDatabase } from '../src/schema-mysql'\nimport { promises } from 'fs'\nimport { relative } from 'path'\n\n// work-around for:\n// TS4023: Exported variable 'command' has or is using name 'local.Command'\n// from external module \"node_modules/commander/typings/index\" but cannot be named.\nexport type Command = commander.Command\n\nexport const mysql = async (program: Command): Promise<void> => {\n    program\n        .command('mysql')\n        .description('Generate a typescript schema from mysql')\n        .argument('[connection]', 'The connection string to use, if left empty will use env variables')\n        .option('-s, --schema <schema>', 'the schema to use', 'public')\n        .option('-t, --tables <tables...>', 'the tables within the schema')\n        .option('-c, --camelCase', 'use camel case for enums, table names, and column names')\n        .option('-e, --enums', 'use enums instead of types')\n        .option('-o, --output <output>', 'where to save the generated file relative to the current working directory')\n        .option('--no-header', 'don\\'t generate a header')\n        .option('--no-bigint', 'use number instead of bigint')\n        .option('--no-optional', 'don\\'t make nullable field optional')\n        .action(async (connection, rest) => {\n            const config = new Config(rest)\n            const database = new MysqlDatabase(config, connection)\n            await database.isReady()\n            const schema = await typescriptOfSchema(config, database)\n            if (rest.output) {\n                const outputPath = relative(process.cwd(), rest.output)\n                await promises.writeFile(outputPath, schema, 'utf8')\n                console.log(`Written schema to ${outputPath}`)\n            } else {\n                console.log(schema)\n            }\n            await database.close()\n        
})\n\n  program.action(program.help)\n}\n"
  },
  {
    "path": "bin/schemats-postgres.ts",
    "content": "import * as commander from 'commander'\nimport { Config, typescriptOfSchema } from '../src/generator'\nimport { PostgresDatabase } from '../src/schema-postgres'\nimport { promises } from 'fs'\nimport { relative } from 'path'\n\n// work-around for:\n// TS4023: Exported variable 'command' has or is using name 'local.Command'\n// from external module \"node_modules/commander/typings/index\" but cannot be named.\nexport type Command = commander.Command\n\nexport const postgres = async (program: Command): Promise<void> => {\n    program\n        .command('postgres')\n        .arguments('[connection]')\n        .option('-s, --schema <schema>', 'the schema to use', 'public')\n        .option('-t, --tables <tables...>', 'the tables within the schema')\n        .option('-f, --typesFile <typesFile>', 'the file where jsonb types can be imported from')\n        .option('-c, --camelCase', 'use camel case for enums, table names, and column names')\n        .option('-C, --camelCaseTypes', 'use camel case only for TS names - not modifying the column names')\n        .option('-e, --enums', 'use enums instead of types')\n        .option('-o, --output <output>', 'where to save the generated file relative to the current working directory')\n        .option('--no-header', 'don\\'t generate a header')\n        .option('--no-throw-on-missing-type', 'don\\'t throw an error when pg type cannot be mapped to ts type')\n        .option('--no-bigint', 'use number instead of bigint')\n        .option('--no-optional', 'don\\'t make nullable field optional')\n        .description('Generate a typescript schema from postgres', {\n            connection: 'The connection string to use, if left empty will use env variables'\n        })\n        .action(async (connection, rest) => {\n            const config = new Config(rest)\n            const database = new PostgresDatabase(config, connection)\n            await database.isReady()\n            const schema = await 
typescriptOfSchema(config, database)\n            if (rest.output) {\n                const outputPath = relative(process.cwd(), rest.output)\n                await promises.writeFile(outputPath, schema, 'utf8')\n                console.log(`Written schema to ${outputPath}`)\n            } else {\n                console.log(schema)\n            }\n            await database.close()\n        })\n\n  program.action(program.help)\n}\n"
  },
  {
    "path": "bin/schemats.ts",
    "content": "#!/usr/bin/env node\nimport { version } from '../package.json'\n\nimport { Command } from 'commander'\nimport { postgres } from './schemats-postgres'\nimport { mysql } from './schemats-mysql'\n\nconst program = new Command('schemats')\nprogram.usage('[command]').version(version.toString())\n\npostgres(program)\nmysql(program)\n\nprogram.parseAsync(process.argv)"
  },
  {
    "path": "example/create-db.ts",
    "content": "import { promises } from 'fs'\nimport { Client } from 'pg'\n\nconst createDB = async () => {\n    const db = new Client('postgres://postgres:password@localhost/postgres')\n    await db.connect()\n    const r = await db.query(`SELECT 'CREATE DATABASE schemats' as create WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'schemats')`)\n    const createSql = r.rows[0]?.create\n    if (createSql) {\n        await db.query(createSql)\n    }    \n    await db.end()\n}\n\nconst main = async () => {\n    await createDB()\n\n    const db = new Client('postgres://postgres:password@localhost/schemats')\n    await db.connect()\n    await db.query<{ version: string }>(`SELECT version()`)\n    await db.query(await promises.readFile(`${__dirname}/schema.sql`, 'utf-8'))\n    await db.end()\n\n}\n\nmain()"
  },
  {
    "path": "example/db-custom-types.ts",
    "content": "export type RandomPetFacts = Record<string, string>"
  },
  {
    "path": "example/db-types.ts",
    "content": "\n/**\n * AUTO-GENERATED FILE @ Wed, 20 Mar 2024 14:40:42 GMT - DO NOT EDIT!\n *\n * This file was automatically generated by schemats v.1.0.7\n * $ schemats generate postgres://username:password@localhost:5432/schemats -C -s pet_store\n *\n */\n\nimport { RandomPetFacts } from './db-custom-types'\n\n\n\nexport enum Animal {\n  'Dog' = 'dog',\n  'Cat' = 'cat' \n}\n\nexport interface User { \n  uuid: string\n  name: string \n}\n\nexport interface Pet { \n  uuid: string\n  owner?: string | null\n  type: Animal\n  name: string\n  birthdate?: Date | null\n  lastSeenLocation?: { x: number, y: number } | null\n  randomFacts?: RandomPetFacts | null\n  moreRandomFacts?: unknown | null\n  cuteName?: string | null \n}\n\nexport interface Tables {\n  user: User,\n  pet: Pet\n}\n\nexport type CustomTypes = RandomPetFacts"
  },
  {
    "path": "example/schema.sql",
    "content": "CREATE EXTENSION IF NOT EXISTS pgcrypto;\n\nDROP SCHEMA IF EXISTS \"pet_store\" CASCADE;\nCREATE SCHEMA \"pet_store\";\n\nCREATE TYPE \"pet_store\".\"animal\" AS enum (\n  'cat',\n  'dog'\n);\n\nCREATE TABLE \"pet_store\".\"user\" (\n  \"uuid\" uuid PRIMARY KEY default gen_random_uuid(),\n  \"name\" text NOT NULL\n);\n\nCREATE TABLE \"pet_store\".\"pet\" (\n  \"uuid\" uuid PRIMARY KEY default gen_random_uuid(),\n  \"owner\" uuid REFERENCES \"pet_store\".\"user\",\n  \"type\" pet_store.animal NOT NULL,\n  \"name\" text NOT NULL,\n  \"birthdate\" date,\n  \"last_seen_location\" point,\n  \"random_facts\" jsonb,\n  \"more_random_facts\" jsonb,\n  \"cute_name\" tsvector\n);\nCOMMENT ON COLUMN pet_store.pet.random_facts is '@type {RandomPetFacts}';"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"@vramework/schemats\",\n  \"version\": \"1.0.8\",\n  \"description\": \"Generate typescript interface definitions from postgres SQL database schema\",\n  \"keywords\": [\n    \"postgres\",\n    \"schema\",\n    \"typescript\",\n    \"sql\"\n  ],\n  \"main\": \"./dist/index.js\",\n  \"types\": \"./dist/index.d.ts\",\n  \"scripts\": {\n    \"ncu\": \"ncu\",\n    \"build\": \"rm -rf dist && tsc\",\n    \"example:create-db\": \"ts-node example/create-db.ts\",\n    \"example:generate:postgres\": \"ts-node ./bin/schemats postgres postgres://postgres:password@localhost/schemats -s pet_store -o example/db-types.ts -f ./db-custom-types -c -e\",\n    \"example:postgres\": \"yarn run example:create-db && yarn run example:generate:postgres\"\n  },\n  \"bin\": \"dist/bin/schemats.js\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"https://github.com/vramework/schemats.git\"\n  },\n  \"bugs\": {\n    \"url\": \"https://github.com/vramework/schemats/issues\"\n  },\n  \"author\": \"Vlandor Ltd\",\n  \"contributors\": [\n    \"Mengxuan Xia <xiamx2004@gmail.com>\",\n    \"Arnaud Benhamdine <arnaud.benhamdine@gmail.com>\",\n    \"zigomir <zigomir@gmail.com>\",\n    \"Mark Crisp <macr1324@gmail.com>\"\n  ],\n  \"license\": \"MIT\",\n  \"devDependencies\": {\n    \"@types/node\": \"^20.11.30\",\n    \"@types/pg\": \"^8.11.3\",\n    \"@types/sinon\": \"^17.0.3\",\n    \"ts-node\": \"^10.9.2\",\n    \"typescript\": \"^5.4.2\"\n  },\n  \"dependencies\": {\n    \"camelcase\": \"^6\",\n    \"commander\": \"^12.0.0\",\n    \"mysql2\": \"^3.9.2\",\n    \"pg\": \"^8.11.3\"\n  }\n}\n"
  },
  {
    "path": "src/config.ts",
    "content": "import camelCase from 'camelcase'\n\nexport interface ConfigValues {\n    schema: string\n    tables: string[]\n    camelCase?: boolean\n    camelCaseTypes?: boolean\n    header?: boolean\n    typesFile?: boolean\n    throwOnMissingType?: boolean\n    enums?: boolean\n    bigint?: boolean\n    optional?: boolean\n}\n\nexport class Config {\n    constructor (public config: Partial<ConfigValues> & Pick<ConfigValues, 'schema' | 'tables'>) {\n        this.config = {\n            header: true,\n            camelCase: false,\n            throwOnMissingType: true,\n            enums: false,\n            bigint: true,\n            optional: true,\n            ...config\n        }\n    }\n\n    public getCLICommand (dbConnection: string): string {\n        const commands = ['schemats', 'generate', dbConnection]\n        if (this.config.camelCase) {\n            commands.push('-C')\n        }\n        if (this.config.tables?.length > 0) {\n            commands.push('-t', this.config.tables.join(' '))\n        }\n        if (this.config.schema) {\n            commands.push(`-s ${this.config.schema}`)\n        }\n        return commands.join(' ')\n    }\n\n    public get enums () {\n        return this.config.enums\n    }\n\n    public get tables () {\n        return this.config.tables\n    }\n\n    public get schema () {\n        return this.config.schema\n    }\n\n    public get writeHeader () {\n        return this.config.header\n    }\n\n    public get typesFile () {\n        return this.config.typesFile\n    }\n\n    public get throwOnMissingType () {\n        return this.config.throwOnMissingType\n    }\n\n    public transformTypeName (typename: string) {\n        return (this.config.camelCase || this.config.camelCaseTypes) ? camelCase(typename, { pascalCase: true }) : typename\n    }\n\n    public transformColumnName (columnName: string) {\n        return this.config.camelCase ? camelCase(columnName) : columnName\n    }\n}\n"
  },
  {
    "path": "src/generator.ts",
    "content": "import { Config, ConfigValues } from './config'\nimport { version } from '../package.json'\nimport { Database } from './schema-interfaces'\nimport camelcase from 'camelcase'\nimport { EnumTypes, TableDefinition } from './schema-interfaces'\n\nconst generateHeader = (config: Config, db: Database): string => {\n    return `\n/**\n * AUTO-GENERATED FILE @ ${new Date().toUTCString()} - DO NOT EDIT!\n *\n * This file was automatically generated by schemats v.${version}\n * $ ${config.getCLICommand(db.getConnectionString())}\n *\n */`\n}\n\nconst reservedJSNames = new Set(['string', 'number', 'package'])\nconst normalizeName = (name: string): string => reservedJSNames.has('name') ? `${name}_` : name\n\nexport function generateEnum(config: Config, enumObject: EnumTypes): string[] {\n    const enumStrings = []\n    for (let enumNameRaw in enumObject) {\n        const enumName = config.transformTypeName(enumNameRaw)\n        if (config.enums) {\n            enumStrings.push(`export enum ${enumName} {\\n${enumObject[enumNameRaw].map((v: string) => `  '${camelcase(v, { pascalCase: true })}' = '${v}'`).join(',\\n')} \\n}`)\n        } else {\n            enumStrings.push(`export type ${enumName} = ${enumObject[enumNameRaw].map((v: string) => `'${v}'`).join(' | ')}`)\n        }\n    }\n    return enumStrings\n}\n\nexport function generateTableInterface(config: Config, tableNameRaw: string, tableDefinition: TableDefinition) {\n    const tableName = config.transformTypeName(tableNameRaw)\n    let members = ''\n    const entries = Object.entries(tableDefinition)\n    for (const [name, { tsType, nullable, isArray }] of entries) {\n        const columnName = config.transformColumnName(name)\n        members += `\\n  ${normalizeName(columnName)}${nullable && config.config.optional ? '?' : ''}: ${tsType}${isArray ? '[]' : ''}${nullable ? 
' | null' : ''}`\n    }\n    return `export interface ${normalizeName(tableName)} { ${members} \\n}`\n}\n\nexport const typescriptOfTable = async (config: Config, db: Database, schema: string, table: string, types: Set<string>) => {\n    const tableTypes = await db.getTableTypes(schema, table, types)\n    return generateTableInterface(config, table, tableTypes)\n}\n\nexport const typescriptLookupForTables = (config: Config, tables: string[]): string => {\n    const types = tables.map(t => `${t}: ${config.transformTypeName(t)}`)\n    return `export interface Tables {\n  ${types.join(',\\n  ')}\n}`\n}\n\nexport const typescriptOfSchema = async (config: Config, db: Database): Promise<string> => {\n    const schema = config.schema || await db.getDefaultSchema()\n    const tables = config.tables || await db.getSchemaTables(schema)\n    const enums = await db.getEnums(schema)\n    const enumTypes = generateEnum(config, enums)\n    const jsonTypesToImport = new Set<string>()\n    const interfaces = await Promise.all(tables.map(table => typescriptOfTable(config, db, schema, table, jsonTypesToImport)))\n    const output = [enumTypes.join('\\n\\n'), interfaces.join('\\n\\n')]\n\n    if (config.typesFile && jsonTypesToImport.size) {\n        output.unshift(`import { ${Array.from(jsonTypesToImport).join(', ')} } from '${config.typesFile}'\\n\\n`)\n    }\n    \n    if (config.writeHeader) {\n        output.unshift(generateHeader(config, db))\n    }\n    \n    output.push(typescriptLookupForTables(config, tables))\n\n    if (jsonTypesToImport.size) {\n        output.push(`export type CustomTypes = ${Array.from(jsonTypesToImport).join(' | ')}`)\n    }\n    \n    return output.join('\\n\\n')\n}\n\nexport {\n    Config,\n    ConfigValues\n}\n"
  },
  {
    "path": "src/schema-interfaces.ts",
    "content": "export interface ForeignKey {\n    table: string;\n    column: string;\n}\n\nexport interface ColumnDefinition {\n    udtName: string,\n    nullable: boolean,\n    tsType?: string\n    isArray: boolean\n    comment?: string;\n    foreignKey?: ForeignKey\n    hasDefault: boolean\n}\n\nexport interface Metadata {\n    schema: string;\n    enumTypes: any\n    foreignKeys: Record<string, { [columnName: string]: ForeignKey }>\n    tableToKeys: Record<string, string>\n    columnComments: Record<string, Record<string, string>>\n    tableComments: Record<string, string>\n}\n\nexport type EnumTypes = Record<string, string[]>\nexport type TableDefinition = Record<string, ColumnDefinition>\n\nexport interface Database {\n    version: string\n    getConnectionString: () => string\n    isReady(): Promise<void>\n    close(): Promise<void>\n    getDefaultSchema(): string\n    getEnums(schemaName: string): Promise<EnumTypes>\n    getTableDefinition(schemaName: string, tableName: string): Promise<TableDefinition>\n    getTableTypes(schemaName: string, tableName: string, types: Set<string>): Promise<TableDefinition>\n    getSchemaTables(schemaName: string): Promise<string[]>\n}\n"
  },
  {
    "path": "src/schema-mysql.ts",
    "content": "import { Config } from './generator'\nimport { TableDefinition, Database, EnumTypes } from './schema-interfaces'\nimport { Connection, createConnection, RowDataPacket } from 'mysql2/promise'\n\n// uses the type mappings from https://github.com/mysqljs/ where sensible\nconst mapTableDefinitionToType = (config: Config, tableDefinition: TableDefinition, enumTypes: Set<string>, customTypes: Set<string>, columnDescriptions: Record<string, string>): TableDefinition => {\n    return Object.entries(tableDefinition).reduce((result, [columnName, column]) => {\n        switch (column.udtName) {\n            case 'char':\n            case 'varchar':\n            case 'text':\n            case 'tinytext':\n            case 'mediumtext':\n            case 'longtext':\n            case 'time':\n            case 'geometry':\n            case 'set':\n            case 'enum':\n                // keep set and enum defaulted to string if custom type not mapped\n                column.tsType = 'string'\n                break\n            case 'bigint':\n                if(config.config.bigint) {\n                    column.tsType = 'bigint'\n                } else {\n                    column.tsType = 'number'\n                }\n                break\n            case 'integer':\n            case 'int':\n            case 'smallint':\n            case 'mediumint':\n            case 'double':\n            case 'decimal':\n            case 'numeric':\n            case 'float':\n            case 'year':\n                column.tsType = 'number'\n                break\n            case 'tinyint':\n                column.tsType = 'boolean'\n                break\n            case 'json':\n                column.tsType = 'unknown'\n                if (columnDescriptions[columnName]) {\n                    const type = /@type \\{([^}]+)\\}/.exec(columnDescriptions[columnName])\n                    if (type) {\n                        column.tsType = type[1].trim()\n           
             customTypes.add(column.tsType)\n                    }\n                }\n                break\n            case 'date':\n            case 'datetime':\n            case 'timestamp':\n                column.tsType = 'Date'\n                break\n            case 'tinyblob':\n            case 'mediumblob':\n            case 'longblob':\n            case 'blob':\n            case 'binary':\n            case 'varbinary':\n            case 'bit':\n                column.tsType = 'Buffer'\n                break\n            default:\n                if (enumTypes.has(column.udtName)) {\n                    column.tsType = config.transformTypeName(column.udtName)\n                    break\n                } else {\n                    const warning = `Type [${column.udtName} has been mapped to [any] because no specific type has been found.`\n                    if (config.throwOnMissingType) {\n                        throw new Error(warning)\n                    }\n                    console.log(`Type [${column.udtName} has been mapped to [any] because no specific type has been found.`)\n                    column.tsType = 'any'\n                    break\n                }\n        }\n        result[columnName] = column\n        return result\n    }, {} as TableDefinition)\n}\n\nconst parseMysqlEnumeration = (mysqlEnum: string): string[] => {\n    return mysqlEnum.replace(/(^(enum|set)\\('|'\\)$)/gi, '').split(`','`)\n}\n\nconst getEnumNameFromColumn = (dataType: string, columnName: string): string => {\n    return `${dataType}_${columnName}`\n}\n\nexport class MysqlDatabase implements Database {\n    public version: string = ''\n    private db!: Connection\n\n    constructor (private config: Config, public connectionString: string) {\n    }\n\n    public async isReady(): Promise<void> {\n        this.db = await createConnection(this.connectionString)\n    }\n\n    public async close(): Promise<void> {\n        await this.db.destroy()\n    }\n\n    
public getConnectionString (): string {\n        return this.connectionString\n    }\n\n    public getDefaultSchema (): string {\n        return 'public'\n    }\n\n    public async getEnums(schema: string): Promise<EnumTypes> {\n        const rawEnumRecords = await this.query<{ COLUMN_NAME: string, COLUMN_TYPE: string, DATA_TYPE: string }>(`\n            SELECT COLUMN_NAME, COLUMN_TYPE, DATA_TYPE\n            FROM information_schema.columns\n            WHERE data_type IN ('enum', 'set') and table_schema = ?\n        `, [schema])\n        return rawEnumRecords.reduce((result, { COLUMN_NAME, COLUMN_TYPE, DATA_TYPE }) => {\n            const enumName = getEnumNameFromColumn(DATA_TYPE, COLUMN_NAME)\n            const enumValues = parseMysqlEnumeration(COLUMN_TYPE)\n            if (result[enumName] && JSON.stringify(result[enumName]) !== JSON.stringify(enumValues)) {\n                throw new Error(\n                    `Multiple enums with the same name and contradicting types were found: ${COLUMN_NAME}: ${JSON.stringify(result[enumName])} and ${JSON.stringify(enumValues)}`\n                )\n            }\n            result[enumName] = enumValues\n            return result\n        }, {} as EnumTypes)\n    }\n\n    public async getTableDefinition (tableSchema: string, tableName: string): Promise<TableDefinition> {\n        const tableColumns = await this.query<{ COLUMN_NAME: string, DATA_TYPE: string, IS_NULLABLE: string, COLUMN_DEFAULT: string }>(`\n            SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_DEFAULT\n            FROM information_schema.columns\n            WHERE table_name = ? and table_schema = ?`,\n            [tableName, tableSchema]\n        )\n        const tableDefinition = tableColumns.reduce((result, schemaItem) => {\n            const columnName = schemaItem.COLUMN_NAME\n            const dataType = schemaItem.DATA_TYPE\n            result[columnName] = {\n                udtName: /^(enum|set)$/i.test(dataType) ? 
getEnumNameFromColumn(dataType, columnName) : dataType,\n                nullable: schemaItem.IS_NULLABLE === 'YES',\n                isArray: false,\n                hasDefault: schemaItem.COLUMN_DEFAULT !== null\n            }\n            return result\n        }, {} as TableDefinition)\n        return tableDefinition\n    }\n\n    public async getTableTypes (tableSchema: string, tableName: string, customTypes: Set<string>) {\n        const enumTypes = await this.getEnums(tableSchema)\n        const columnComments = await this.getColumnComments(tableSchema, tableName)\n        return mapTableDefinitionToType(\n            this.config, \n            await this.getTableDefinition(tableSchema, tableName), \n            new Set(Object.keys(enumTypes)), \n            customTypes,\n            columnComments\n        )\n    }\n\n    public async getSchemaTables (schemaName: string): Promise<string[]> {\n        const schemaTables = await this.query<{ TABLE_NAME: string }>(`\n            SELECT TABLE_NAME\n            FROM information_schema.columns\n            WHERE table_schema = ?\n            GROUP BY table_name\n        `,\n            [schemaName]\n        )\n        return schemaTables.map((schemaItem: { TABLE_NAME: string }) => schemaItem.TABLE_NAME)\n    }\n\n    public async getColumnComments(schemaName: string, tableName: string) {\n        // See https://stackoverflow.com/a/4946306/388951\n        const commentsResult = await this.query<{\n            table_name: string;\n            column_name: string;\n            description: string;\n        }>(\n            `\n            select column_name, column_type, column_default, column_comment as description\n            from information_schema.COLUMNS\n            where table_schema = ? 
and table_name = ?;\n            `,\n            [schemaName, tableName],\n        );\n        return commentsResult.reduce((result, { column_name, description }) => {\n            result[column_name] = description\n            return result\n        }, {} as Record<string, string>)\n    }\n\n    private async query <T>(query: string, args: any[]): Promise<T[]> {\n        const [rows, columns] = await this.db.query<RowDataPacket[]>(query, args)\n        return rows as unknown as T[]\n    }\n}\n"
  },
  {
    "path": "src/schema-postgres.ts",
    "content": "import { Client } from 'pg'\nimport { Config } from './generator'\nimport { TableDefinition, Database, EnumTypes } from './schema-interfaces'\n\nconst mapPostgresTableDefinitionToType = (config: Config, tableDefinition: TableDefinition, enumTypes: Set<string>, customTypes: Set<string>, columnDescriptions: Record<string, string>): TableDefinition => {\n    return Object.entries(tableDefinition).reduce((result, [columnName, column]) => {\n        switch (column.udtName) {\n            case 'bpchar':\n            case 'char':\n            case 'varchar':\n            case 'text':\n            case 'citext':\n            case 'uuid':\n            case 'bytea':\n            case 'inet':\n            case 'time':\n            case 'timetz':\n            case 'interval':\n            case 'tsvector':\n            case 'mol':\n            case 'bfp':\n            case 'bit':\n            case 'name':\n                column.tsType = 'string'\n                break\n            case 'int8':\n                if(config.config.bigint) {\n                    column.tsType = 'bigint'\n                } else {\n                    column.tsType = 'number'\n                }\n                break\n            case 'int2':\n            case 'int4':\n            case 'float4':\n            case 'float8':\n            case 'numeric':\n            case 'money':\n            case 'oid':\n                column.tsType = 'number'\n                break\n            case 'bool':\n                column.tsType = 'boolean'\n                break\n            case 'json':\n            case 'jsonb':\n                column.tsType = 'unknown'\n                if (columnDescriptions[columnName]) {\n                    const type = /@type \\{([^}]+)\\}/.exec(columnDescriptions[columnName])\n                    if (type) {\n                        column.tsType = type[1].trim()\n                        customTypes.add(column.tsType)\n                    }\n                }\n     
           break\n            case 'date':\n            case 'timestamp':\n            case 'timestamptz':\n                column.tsType = 'Date'\n                break\n            case 'point':\n                column.tsType = '{ x: number, y: number }'\n                break\n            default:\n                if (enumTypes.has(column.udtName)) {\n                    column.tsType = config.transformTypeName(column.udtName)\n                    break\n                } else {\n                    const warning = `Type [${column.udtName}] has been mapped to [any] because no specific type has been found.`\n                    if (config.throwOnMissingType) {\n                        throw new Error(warning)\n                    }\n                    console.log(`Type [${column.udtName}] has been mapped to [any] because no specific type has been found.`)\n                    column.tsType = 'any'\n                    break\n                }\n        }\n        result[columnName] = column\n        return result\n    }, {} as TableDefinition)\n}\n\nexport class PostgresDatabase implements Database {\n    private db: Client\n    public version: string = ''\n\n    constructor(private config: Config, private connectionString?: string) {\n        this.db = new Client(connectionString)\n    }\n\n    public async isReady() {\n        await this.db.connect()\n        this.connectionString = `postgres://username:password@${this.db.host}:${this.db.port}/${this.db.database}`\n        const result = await this.db.query<{ version: string }>(`SELECT version()`)\n        this.version = result.rows[0].version\n    }\n\n    public async close() {\n        await this.db.end()\n    }\n\n    public getConnectionString(): string {\n        return this.connectionString!\n    }\n\n    public getDefaultSchema(): string {\n        return 'public'\n    }\n\n    public async getEnums(schema: string): Promise<EnumTypes> {\n        const results = await this.db.query<{ name: string, value: 
string }>(`\n            SELECT n.nspname as schema, t.typname as name, e.enumlabel as value\n            FROM pg_type t\n            JOIN pg_enum e ON t.oid = e.enumtypid\n            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace\n            WHERE n.nspname = $1\n        `, [schema])\n        return results.rows.reduce((result, { name, value }) => {\n            let values = result[name] || []\n            values.push(value)\n            result[name] = values\n            return result\n        }, {} as EnumTypes)\n    }\n\n    public async getTableDefinition(tableSchema: string, tableName: string) {\n        const result = await this.db.query<{ column_name: string, udt_name: string, is_nullable: string, has_default: boolean }>(`\n            SELECT column_name, udt_name, is_nullable, column_default IS NOT NULL as has_default\n            FROM information_schema.columns\n            WHERE table_name = $1 and table_schema = $2\n        `, [tableName, tableSchema])\n        if (result.rows.length === 0) {\n            console.error(`Missing table: ${tableSchema}.${tableName}`)\n        }\n        // https://www.developerfiles.com/adding-and-retrieving-comments-on-postgresql-tables/\n        return result.rows.reduce((result, { column_name, udt_name, is_nullable, has_default }) => {\n            result[column_name] = {\n                udtName: udt_name.replace(/^_/, ''),\n                nullable: is_nullable === 'YES',\n                isArray: udt_name.startsWith('_'),\n                hasDefault: has_default,\n            }\n            return result\n        }, {} as TableDefinition)\n    }\n\n    public async getTableTypes(tableSchema: string, tableName: string, customTypes: Set<string>) {\n        const enumTypes = await this.getEnums(tableSchema)\n        const columnComments = await this.getColumnComments(tableSchema, tableName)\n        return mapPostgresTableDefinitionToType(\n            this.config, \n            await 
this.getTableDefinition(tableSchema, tableName), \n            new Set(Object.keys(enumTypes)), \n            customTypes,\n            columnComments\n        )\n    }\n\n    public async getSchemaTables(schemaName: string): Promise<string[]> {\n        const result = await this.db.query(`\n            SELECT table_name\n            FROM information_schema.columns\n            WHERE table_schema = $1\n            GROUP BY table_name\n        `, [schemaName])\n        if (result.rows.length === 0) {\n            console.error(`Missing schema: ${schemaName}`)\n        }\n        return result.rows.map(({ table_name }) => table_name)\n    }\n\n    /**\n        public async getPrimaryKeys(schemaName: string) {\n        interface PrimaryKeyDefinition {\n            table_name: string;\n            constraint_name: string;\n            ordinal_position: number;\n            key_column: string;\n        }\n\n        // https://dataedo.com/kb/query/postgresql/list-all-primary-keys-and-their-columns\n        const keysResult: PrimaryKeyDefinition[] = await this.db.query(\n            `\n                SELECT\n                    kcu.table_name,\n                    tco.constraint_name,\n                    kcu.ordinal_position as position,\n                    kcu.column_name as key_column\n                FROM information_schema.table_constraints tco\n                JOIN information_schema.key_column_usage kcu\n                    on kcu.constraint_name = tco.constraint_name\n                    and kcu.constraint_schema = tco.constraint_schema\n                    and kcu.constraint_name = tco.constraint_name\n                WHERE tco.constraint_type = 'PRIMARY KEY'\n                  AND kcu.table_schema = $1\n                ORDER BY kcu.table_name,\n                         position;\n            `,\n            [schemaName],\n        );\n\n        return []\n    }\n    **/\n\n    public async getColumnComments(schemaName: string, tableName: string) {\n        // 
See https://stackoverflow.com/a/4946306/388951\n        const commentsResult = await this.db.query<{\n            table_name: string;\n            column_name: string;\n            description: string;\n        }>(\n            `\n                SELECT\n                    c.table_name,\n                    c.column_name,\n                    pgd.description\n                FROM pg_catalog.pg_statio_all_tables AS st\n                INNER JOIN pg_catalog.pg_description pgd ON (pgd.objoid=st.relid)\n                INNER JOIN information_schema.columns c ON (\n                    pgd.objsubid=c.ordinal_position AND\n                    c.table_schema=st.schemaname AND\n                    c.table_name=st.relname\n                )\n                WHERE c.table_schema = $1 and c.table_name = $2\n            `,\n            [schemaName, tableName],\n        );\n        return commentsResult.rows.reduce((result, { column_name, description }) => {\n            result[column_name] = description\n            return result\n        }, {} as Record<string, string>)\n    }\n\n        /**\n    public async getTableComments(schemaName: string) {\n        interface TableComment {\n            table_name: string;\n            description: string;\n        }\n        const comments: TableComment[] = await this.db.query(\n            `\n                SELECT\n                    t.table_name,\n                    pgd.description\n                FROM pg_catalog.pg_statio_all_tables AS st\n                INNER JOIN pg_catalog.pg_description pgd ON (pgd.objoid=st.relid)\n                INNER JOIN information_schema.tables t ON (\n                    t.table_schema=st.schemaname AND\n                    t.table_name=st.relname\n                )\n                WHERE pgd.objsubid = 0\n                  AND t.table_schema = $1;\n            `,\n            [schemaName],\n        );\n\n        return _.fromPairs(comments.map((c) => [c.table_name, c.description]));\n    }\n\n    
async getForeignKeys(schemaName: string) {\n        interface ForeignKey {\n            table_name: string;\n            column_name: string;\n            foreign_table_name: string;\n            foreign_column_name: string;\n            conname: string;\n        }\n        // See https://stackoverflow.com/a/10950402/388951\n        const fkeys: ForeignKey[] = await this.db.query(\n            `\n            SELECT\n                cl2.relname AS table_name,\n                att2.attname AS column_name,\n                cl.relname AS foreign_table_name,\n                att.attname AS foreign_column_name,\n                conname\n            FROM\n                (SELECT\n                    unnest(con1.conkey) AS \"parent\",\n                    unnest(con1.confkey) AS \"child\",\n                    con1.confrelid,\n                    con1.conrelid,\n                    con1.conname\n                FROM pg_class cl\n                JOIN pg_namespace ns ON cl.relnamespace = ns.oid\n                JOIN pg_constraint con1 ON con1.conrelid = cl.oid\n                WHERE ns.nspname = $1 AND con1.contype = 'f'\n                ) con\n            JOIN pg_attribute att ON att.attrelid = con.confrelid and att.attnum = con.child\n            JOIN pg_class cl ON cl.oid = con.confrelid\n            JOIN pg_class cl2 ON cl2.oid = con.conrelid\n            JOIN pg_attribute att2 ON att2.attrelid = con.conrelid AND att2.attnum = con.parent\n            `,\n            [schemaName],\n        );\n\n        // Multi-column foreign keys are harder to model.\n        // To get consistent outputs, just ignore them for now.\n        const countKey = (fk: ForeignKey) => `${fk.table_name},${fk.conname}`;\n        const colCounts = _.countBy(fkeys, countKey);\n\n        return _(fkeys)\n            .filter((c) => colCounts[countKey(c)] < 2)\n            .groupBy((c) => c.table_name)\n            .mapValues((tks) =>\n                _.fromPairs(\n                    tks.map((ck) => 
[\n                        ck.column_name,\n                        { table: ck.foreign_table_name, column: ck.foreign_column_name },\n                    ]),\n                ),\n            )\n            .value();\n    }\n\n    async getMeta(schemaName: string): Promise<Metadata> {\n        if (this.metadata && schemaName === this.metadata.schema) {\n            return this.metadata;\n        }\n\n        const [\n            enumTypes,\n            tableToKeys,\n            foreignKeys,\n            columnComments,\n            tableComments,\n        ] = await Promise.all([\n            this.getEnumTypes(),\n            this.getPrimaryKeys(schemaName),\n            this.getForeignKeys(schemaName),\n            this.getColumnComments(schemaName),\n            this.getTableComments(schemaName),\n        ]);\n\n        const metadata: Metadata = {\n            schema: schemaName,\n            enumTypes,\n            tableToKeys,\n            foreignKeys,\n            columnComments,\n            tableComments,\n        };\n\n        this.metadata = metadata;\n        return metadata;\n    }\n    */\n}\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n    \"compilerOptions\": {\n        \"module\": \"commonjs\",\n        \"target\": \"es5\",\n        \"lib\": [\"es2020\"],\n        \"strict\": true,\n        \"noImplicitAny\": true,\n        \"declaration\": true,\n        \"strictNullChecks\": true,\n        \"sourceMap\": true,\n        \"outDir\": \"dist\",\n        \"esModuleInterop\": true,\n        \"resolveJsonModule\": true\n    },\n    \"exclude\": [\"node_modules\"]\n}\n"
  }
]