Full Code of vramework/schemats for AI

main 89967d03ae16 cached
22 files
49.2 KB
12.2k tokens
53 symbols
1 requests
Download .txt
Repository: vramework/schemats
Branch: main
Commit: 89967d03ae16
Files: 22
Total size: 49.2 KB

Directory structure:
gitextract_tv4k2ze4/

├── .github/
│   └── workflows/
│       ├── main.yml
│       └── publish.yml
├── .gitignore
├── .npmignore
├── .yarnrc.yml
├── CHANGELOG.md
├── LICENSE
├── README.md
├── bin/
│   ├── schemats-mysql.ts
│   ├── schemats-postgres.ts
│   └── schemats.ts
├── example/
│   ├── create-db.ts
│   ├── db-custom-types.ts
│   ├── db-types.ts
│   └── schema.sql
├── package.json
├── src/
│   ├── config.ts
│   ├── generator.ts
│   ├── schema-interfaces.ts
│   ├── schema-mysql.ts
│   └── schema-postgres.ts
└── tsconfig.json

================================================
FILE CONTENTS
================================================

================================================
FILE: .github/workflows/main.yml
================================================
name: Build
run-name: Building changes
on: [push]
jobs:
  main:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:alpine
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: password
          POSTGRES_DB: schemats
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps tcp port 5432 on service container to the host
          - 5432:5432
    steps:
      - name: Check out repository code
        uses: actions/checkout@v4
      - run: yarn install
      - run: yarn run build
      - run: yarn run example:postgres


================================================
FILE: .github/workflows/publish.yml
================================================
name: Publish Package to npmjs
on:
  release:
    types: [published]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Setup .npmrc file to publish to npm
      - uses: actions/setup-node@v4
        with:
          node-version: '20.x'
          registry-url: 'https://registry.npmjs.org'
      - run: yarn install
      - run: yarn run build
      - run: yarn publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

================================================
FILE: .gitignore
================================================
# Build
.build

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test
.env.production

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

================================================
FILE: .npmignore
================================================
.npmrc
node_modules
.circleci

================================================
FILE: .yarnrc.yml
================================================
nodeLinker: node-modules


================================================
FILE: CHANGELOG.md
================================================
## [1.0.8] - 2024.03.20

misc: updating packages
ci: switching to github actions

## [1.0.7] - 2022.11.27

feat: add --no-bigint #17 (wirekang)
feat: add --no-optional #18 (wirekang)
fix: --no-write-header to --no-header (wirekang)
misc: updating packages

## [1.0.6] - 2022.08.29

chore: upgrading dependencies

## [1.0.5] - 2022.06.26

feat: infer bigint type 
chore: upgrading dependencies

## [1.0.4] - 2022.02.22

fix(mysql): for some versions of mysql key casing results in empty hash lookups
feat(postgres): Adding types 'mol', 'bfp' and 'bit' 

## [1.0.3] - 2022.02.22

chore: adding ci

## [1.0.2] - 2022.02.19

chore: Upgrading dependencies

## [1.0.1] - 2022.02.03

fix(postgres): (bchrobot) adding missing cli command throwOnMissingType

fix(postgres): (bchrobot) typo in write-header option

## [0.0.12] - 2021.11.01

fix: typo in CLI

## [0.0.11] - 2021.10.31

feat: adding mysql compatibility

This allows you to do the same thing just with mysql using `/bin/schemats mysql $connection_string -s $schema_name `

## [0.0.10] - 2021.09.02

chore: updating all dependencies

feat: add -C --camelCaseTypes option
    
    This option adds the ability to camel case just the type names - which
    gives a good mix between using JS Standard Camel Case and still
    following the actual definitions of database.
    
    The issue with using camel case for both the types and the keys
    is that we would have to provide a layer within the programs using the
    types to convert back to the original form if the attributes are
    different in JS than in the schema.
    
    There are definitely
    issues with this, especially with a database schema with an inconsistent
    naming convention - we would have to provide some sort of mapping file
    to achieve correct conversion.
    
    The types on the other hand, only exist in JS and therefore can be named
    whatever we want when generating the types.

fix(schema): add 'tsvector' to string types
    
    Text Search Vectors are a complex type inside of postgres, but can
    generally be expressed as strings within TS.

fix(generator): quote string enum keys
    
    This helps prevent issues in the generated file due to special
    characters like `:` present in the postgres enum keys.

## [0.0.9] - 2021.08.27

doc: adding example documentation
fix: Don't export custom types if empty

## [0.0.8] - 2021.08.22

Feat: Exporting tables and Custom types for typed-postgres

## [0.0.7] - 2021.08.05

Fix: array regression due to bad merge

## [0.0.6] - 2021.08.05

Feat: using the -f flag to reference a file with non DB types and adding comments to columns in postgres using `COMMENT ON COLUMN schema.table.column is '@type {TYPE}';` now allows us to type jsonb columns directly

## [0.0.5] - 2021.07.26

Fix: isArray overrides real value with false

## [0.0.4] - 2021.07.26

Fix: publish dist and src packages

## [0.0.3] - 2021.07.26

Fix: nullable fields are also optional

## [0.0.2] - 2021.07.26

Fix: Adding support for arrays

## [0.0.1] - 2021.06.20

Include README file in published package

## [0.0.0] - 2021.06.20

First release


================================================
FILE: LICENSE
================================================
MIT License

Copyright (c) 2021 Vlandor Ltd
Copyright (c) 2016 SweetIQ

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


================================================
FILE: README.md
================================================
# Schemats

Before anything, I would like to give a massive thank you to [sweetiq](https://www.npmjs.com/package/schemats) and their contributors for giving me a huge head start.

The reason I have created a new repo instead of a fork is because I don't support mysql and have some breaking changes due to how this library is consumed by [postgres-typed](https://github.com/vramework/postgres-typed) and [vramework](https://vramework.io/).

I have kept the name and based off their MIT license as means of attribution and thanks.

## Why Schemats

Because being able to make a change to your database structure and have it:

- validate through your node backend APIs
- get verified against automatically generated JSON schemas
- raise errors in your frontend application 

Is just a great developer experience in my opinion.

This allows us to do some pretty amazing things when it comes to refactoring and maintaining codebases, and
also provide the meta-data to help with libraries like [postgres-typed](https://github.com/vramework/postgres-typed). 

## Quickstart

### Installing

```bash
yarn add -D @vramework/schemats || npm install -D @vramework/schemats
```

### Generating the type definition from schema

Assuming you have the following schema (this is a bit of a random one):

```sql
CREATE SCHEMA "pet_store";

CREATE TYPE "pet_store"."animal" AS enum (
  'cat',
  'dog'
);

CREATE TABLE "pet_store"."user" (
  "uuid" uuid PRIMARY KEY default gen_random_uuid(),
  "name" text NOT NULL
);

CREATE TABLE "pet_store"."pet" (
  "uuid" uuid PRIMARY KEY default gen_random_uuid(),
  "owner" uuid REFERENCES "pet_store"."user",
  "type" pet_store.animal NOT NULL,
  "name" text NOT NULL,
  "birthdate" date,
  "last_seen_location" point,
  "random_facts" jsonb,
  "pet_search_document" tsvector
);
COMMENT ON COLUMN pet_store.pet.random_facts is '@type {RandomPetFacts}';
```

You can now generate a bunch of different schema definitions.

My personal favourite is the following:

```bash
schemats postgres postgres://postgres@localhost/database -f ./db-custom-types.ts -s pet_store -c -e -o db-types.ts
```

Which will result in the following TypeScript file:

```typescript

/**
 * AUTO-GENERATED FILE @ Fri, 27 Aug 2021 08:26:50 GMT - DO NOT EDIT!
 *
 * This file was automatically generated by schemats v.0.0.8
 * $ schemats generate postgres://username:password@localhost:5432/schemats -C -s pet_store
 *
 */

import { RandomPetFacts } from './db-custom-types'

export enum Animal {
	'Cat' = 'cat',
	'Dog' = 'dog' 
}

export interface User { 
	uuid: string
	name: string 
}

export interface Pet { 
	uuid: string
	owner?: string | null
	type: Animal
	name: string
	birthdate?: Date | null
	lastSeenLocation?: { x: number, y: number } | null
	randomFacts?: RandomPetFacts | null
	moreRandomFacts?: unknown | null 
	petSearchDocument?: string | null
}

export interface Tables {
    user: User,
	pet: Pet
}

export type CustomTypes = RandomPetFacts
```

But you have quite a bit of flexibility:

```bash
Usage: schemats postgres [options] [connection]

Generate a typescript schema from postgres

Arguments:
  connection                   The connection string to use, if left empty will use env variables

Options:
  -s, --schema <schema>        the schema to use (default: "public")
  -t, --tables <tables...>     the tables within the schema
  -f, --typesFile <typesFile>  the file where jsonb types can be imported from
  -c, --camelCase              use camel case for enums, table names, and column names
  -C, --camelCaseTypes         use camel case only for TS names - not modifying the column names
  -e, --enums                  use enums instead of types
  -o, --output <output>        where to save the generated file relative to the current working directory
  --no-header                  don't generate a header
  -h, --help                   display help for command
```

```bash
Generate a typescript schema from mysql

Arguments:
  connection                   The connection string to use, if left empty will use env variables

Options:
  -s, --schema <schema>        the schema to use (default: "public")
  -t, --tables <tables...>     the tables within the schema
  -f, --typesFile <typesFile>  the file where jsonb types can be imported from
  -c, --camelCase              use camel case for enums, table names, and column names
  -C, --camelCaseTypes         use camel case only for TS names - not modifying the column names
  -e, --enums                  use enums instead of types
  -o, --output <output>        where to save the generated file relative to the current working directory
  --no-header                  don't generate a header
  -h, --help                   display help for command
```

## Features

### Camel Case `-c --camelCase, -C --camelCaseTypes`

This automatically turns all your tables and Enums / Types and column names to camelcase, which is the default
experience for javascript and is more consistent to use

You can use Camel Case Types to just camel case the TS entities - leaving the strings representing 
the SQL columns alone.

### Enums `-e --enums`

Using enums turns all postgres enums into Enums instead of normal types, which is just a
preference aspect for developers since renaming enum values or order will change the Enum
key and value.

### Types File `-f --typesFile <typesFile>`

This is a VERY useful feature for jsonb fields. Normally a jsonb field type is unknown, 
however if you provide a types json file this will get the type out of the comment 
of a field and assign it to the value.

The structure of a custom type file could either be from another file:

```typescript
export type { RandomPetFacts }  from './somewhere-else'
```

or it could just be defined straight in the file.

```typescript
export type RandomPetFacts = Record<string, string>
```

### Tables | Custom Types `-t --tables <tables...>`

These types are automatically generated to power typed-postgres

## Using in typescript

You can import all your interfaces / enums from the file:

```typescript
import * as DB from './db-types'

// And then you can start picking how you want your APIs to be used:
type updatePetLocation = Pick<DB.Pet, 'lastSeenLocation'>
```

## Tests

So where are the tests? The original schemats library has an amazing 100% coverage and this one has 0.

To be honest, I'm using this library in a few of my current projects and any error in it throws dozens 
in the entire codebase, so it sort of tests itself. That being said I will be looking to add some in again,
but in terms of priorities not my highest.

However for manual testing and experimenting you can easily replicate this project by:

```bash
# Clone the repo
git clone git@github.com:vramework/schemats.git
# Enter repo
cd schemats
# Install dependencies
yarn install
# Run the example, which will build the schemats library and generate the db-types file
yarn run example:postgres
```



================================================
FILE: bin/schemats-mysql.ts
================================================
import * as commander from 'commander'
import { Config, typescriptOfSchema } from '../src/generator'
import { MysqlDatabase } from '../src/schema-mysql'
import { promises } from 'fs'
import { relative } from 'path'

// work-around for:
// TS4023: Exported variable 'command' has or is using name 'local.Command'
// from external module "node_modules/commander/typings/index" but cannot be named.
export type Command = commander.Command

/**
 * Registers the `mysql` subcommand on the given commander program.
 * The action connects to mysql, generates the typescript schema and either
 * writes it to the requested output file or prints it to stdout.
 */
export const mysql = async (program: Command): Promise<void> => {
    const run = async (connection: string, options: any) => {
        const config = new Config(options)
        const database = new MysqlDatabase(config, connection)
        await database.isReady()
        const schema = await typescriptOfSchema(config, database)
        if (options.output) {
            // Normalise the target to a path relative to the current working directory.
            const outputPath = relative(process.cwd(), options.output)
            await promises.writeFile(outputPath, schema, 'utf8')
            console.log(`Written schema to ${outputPath}`)
        } else {
            console.log(schema)
        }
        await database.close()
    }

    const command = program.command('mysql')
    command.description('Generate a typescript schema from mysql')
    command.argument('[connection]', 'The connection string to use, if left empty will use env variables')
    command.option('-s, --schema <schema>', 'the schema to use', 'public')
    command.option('-t, --tables <tables...>', 'the tables within the schema')
    command.option('-c, --camelCase', 'use camel case for enums, table names, and column names')
    command.option('-e, --enums', 'use enums instead of types')
    command.option('-o, --output <output>', 'where to save the generated file relative to the current working directory')
    command.option('--no-header', 'don\'t generate a header')
    command.option('--no-bigint', 'use number instead of bigint')
    command.option('--no-optional', 'don\'t make nullable field optional')
    command.action(run)

    // Invoking the root program with no subcommand prints the help text.
    program.action(program.help)
}


================================================
FILE: bin/schemats-postgres.ts
================================================
import * as commander from 'commander'
import { Config, typescriptOfSchema } from '../src/generator'
import { PostgresDatabase } from '../src/schema-postgres'
import { promises } from 'fs'
import { relative } from 'path'

// work-around for:
// TS4023: Exported variable 'command' has or is using name 'local.Command'
// from external module "node_modules/commander/typings/index" but cannot be named.
export type Command = commander.Command

/**
 * Registers the `postgres` subcommand on the given commander program.
 * The action connects to postgres, generates the typescript schema and either
 * writes it to the requested output file or prints it to stdout.
 */
export const postgres = async (program: Command): Promise<void> => {
    const run = async (connection: string, options: any) => {
        const config = new Config(options)
        const database = new PostgresDatabase(config, connection)
        await database.isReady()
        const schema = await typescriptOfSchema(config, database)
        if (options.output) {
            // Normalise the target to a path relative to the current working directory.
            const outputPath = relative(process.cwd(), options.output)
            await promises.writeFile(outputPath, schema, 'utf8')
            console.log(`Written schema to ${outputPath}`)
        } else {
            console.log(schema)
        }
        await database.close()
    }

    const command = program.command('postgres')
    command.arguments('[connection]')
    command.option('-s, --schema <schema>', 'the schema to use', 'public')
    command.option('-t, --tables <tables...>', 'the tables within the schema')
    command.option('-f, --typesFile <typesFile>', 'the file where jsonb types can be imported from')
    command.option('-c, --camelCase', 'use camel case for enums, table names, and column names')
    command.option('-C, --camelCaseTypes', 'use camel case only for TS names - not modifying the column names')
    command.option('-e, --enums', 'use enums instead of types')
    command.option('-o, --output <output>', 'where to save the generated file relative to the current working directory')
    command.option('--no-header', 'don\'t generate a header')
    command.option('--no-throw-on-missing-type', 'don\'t throw an error when pg type cannot be mapped to ts type')
    command.option('--no-bigint', 'use number instead of bigint')
    command.option('--no-optional', 'don\'t make nullable field optional')
    command.description('Generate a typescript schema from postgres', {
        connection: 'The connection string to use, if left empty will use env variables'
    })
    command.action(run)

    // Invoking the root program with no subcommand prints the help text.
    program.action(program.help)
}


================================================
FILE: bin/schemats.ts
================================================
#!/usr/bin/env node
import { version } from '../package.json'

import { Command } from 'commander'
import { postgres } from './schemats-postgres'
import { mysql } from './schemats-mysql'

// Root CLI entry point: wires the per-database subcommands onto one program
// and hands argv to commander, which dispatches to the chosen action.
const program = new Command('schemats')
program.usage('[command]')
program.version(version.toString())

// Registration functions are declared async but do no awaiting of their own,
// so both subcommands are registered before parseAsync runs below.
postgres(program)
mysql(program)

program.parseAsync(process.argv)

================================================
FILE: example/create-db.ts
================================================
import { promises } from 'fs'
import { Client } from 'pg'

// Creates the `schemats` database on the local server if it does not exist.
// CREATE DATABASE cannot be made conditional directly, so we SELECT the
// statement text guarded by a pg_database lookup and only execute it when a
// row comes back.
const createDB = async () => {
    const admin = new Client('postgres://postgres:password@localhost/postgres')
    await admin.connect()
    const result = await admin.query(`SELECT 'CREATE DATABASE schemats' as create WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'schemats')`)
    const createStatement = result.rows[0]?.create
    if (createStatement) {
        await admin.query(createStatement)
    }
    await admin.end()
}

// Ensures the example database exists, then applies example/schema.sql to it.
const main = async () => {
    await createDB()

    const db = new Client('postgres://postgres:password@localhost/schemats')
    await db.connect()
    // Sanity-check the connection before loading the schema.
    await db.query<{ version: string }>(`SELECT version()`)
    const schemaSql = await promises.readFile(`${__dirname}/schema.sql`, 'utf-8')
    await db.query(schemaSql)
    await db.end()

}

main()

================================================
FILE: example/db-custom-types.ts
================================================
export type RandomPetFacts = Record<string, string>

================================================
FILE: example/db-types.ts
================================================

/**
 * AUTO-GENERATED FILE @ Wed, 20 Mar 2024 14:40:42 GMT - DO NOT EDIT!
 *
 * This file was automatically generated by schemats v.1.0.7
 * $ schemats generate postgres://username:password@localhost:5432/schemats -C -s pet_store
 *
 */

import { RandomPetFacts } from './db-custom-types'



export enum Animal {
  'Dog' = 'dog',
  'Cat' = 'cat' 
}

export interface User { 
  uuid: string
  name: string 
}

export interface Pet { 
  uuid: string
  owner?: string | null
  type: Animal
  name: string
  birthdate?: Date | null
  lastSeenLocation?: { x: number, y: number } | null
  randomFacts?: RandomPetFacts | null
  moreRandomFacts?: unknown | null
  cuteName?: string | null 
}

export interface Tables {
  user: User,
  pet: Pet
}

export type CustomTypes = RandomPetFacts

================================================
FILE: example/schema.sql
================================================
CREATE EXTENSION IF NOT EXISTS pgcrypto;

DROP SCHEMA IF EXISTS "pet_store" CASCADE;
CREATE SCHEMA "pet_store";

CREATE TYPE "pet_store"."animal" AS enum (
  'cat',
  'dog'
);

CREATE TABLE "pet_store"."user" (
  "uuid" uuid PRIMARY KEY default gen_random_uuid(),
  "name" text NOT NULL
);

CREATE TABLE "pet_store"."pet" (
  "uuid" uuid PRIMARY KEY default gen_random_uuid(),
  "owner" uuid REFERENCES "pet_store"."user",
  "type" pet_store.animal NOT NULL,
  "name" text NOT NULL,
  "birthdate" date,
  "last_seen_location" point,
  "random_facts" jsonb,
  "more_random_facts" jsonb,
  "cute_name" tsvector
);
COMMENT ON COLUMN pet_store.pet.random_facts is '@type {RandomPetFacts}';

================================================
FILE: package.json
================================================
{
  "name": "@vramework/schemats",
  "version": "1.0.8",
  "description": "Generate typescript interface definitions from postgres SQL database schema",
  "keywords": [
    "postgres",
    "schema",
    "typescript",
    "sql"
  ],
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "scripts": {
    "ncu": "ncu",
    "build": "rm -rf dist && tsc",
    "example:create-db": "ts-node example/create-db.ts",
    "example:generate:postgres": "ts-node ./bin/schemats postgres postgres://postgres:password@localhost/schemats -s pet_store -o example/db-types.ts -f ./db-custom-types -c -e",
    "example:postgres": "yarn run example:create-db && yarn run example:generate:postgres"
  },
  "bin": "dist/bin/schemats.js",
  "repository": {
    "type": "git",
    "url": "https://github.com/vramework/schemats.git"
  },
  "bugs": {
    "url": "https://github.com/vramework/schemats/issues"
  },
  "author": "Vlandor Ltd",
  "contributors": [
    "Mengxuan Xia <xiamx2004@gmail.com>",
    "Arnaud Benhamdine <arnaud.benhamdine@gmail.com>",
    "zigomir <zigomir@gmail.com>",
    "Mark Crisp <macr1324@gmail.com>"
  ],
  "license": "MIT",
  "devDependencies": {
    "@types/node": "^20.11.30",
    "@types/pg": "^8.11.3",
    "@types/sinon": "^17.0.3",
    "ts-node": "^10.9.2",
    "typescript": "^5.4.2"
  },
  "dependencies": {
    "camelcase": "^6",
    "commander": "^12.0.0",
    "mysql2": "^3.9.2",
    "pg": "^8.11.3"
  }
}


================================================
FILE: src/config.ts
================================================
import camelCase from 'camelcase'

// Options accepted by the generator; mirrors the CLI flags in bin/.
export interface ConfigValues {
    schema: string                 // database schema to introspect
    tables: string[]               // explicit table list; empty/absent means discover all
    camelCase?: boolean            // camel case enums, table names and column names (-c)
    camelCaseTypes?: boolean       // camel case only the generated TS type names (-C)
    header?: boolean               // emit the AUTO-GENERATED banner (--no-header disables)
    // Module path that custom jsonb types are imported from (-f). This was
    // previously declared as boolean, but the CLI supplies a string and the
    // generator interpolates it into an import path.
    typesFile?: string
    throwOnMissingType?: boolean   // throw when a db type has no TS mapping
    enums?: boolean                // emit TS enums instead of union types (-e)
    bigint?: boolean               // map 64-bit integers to bigint (--no-bigint -> number)
    optional?: boolean             // make nullable columns optional (--no-optional disables)
}

export class Config {
    /**
     * Wraps raw generator options, applying defaults for anything the caller
     * (CLI action or programmatic use) did not supply.
     */
    constructor (public config: Partial<ConfigValues> & Pick<ConfigValues, 'schema' | 'tables'>) {
        this.config = {
            header: true,
            camelCase: false,
            throwOnMissingType: true,
            enums: false,
            bigint: true,
            optional: true,
            ...config
        }
    }

    /**
     * Reconstructs an approximate CLI invocation for the generated file's
     * header comment. Only the casing flags, tables and schema are echoed;
     * flags such as -e or -f are not reproduced.
     */
    public getCLICommand (dbConnection: string): string {
        const commands = ['schemats', 'generate', dbConnection]
        if (this.config.camelCase) {
            // -c is the camelCase flag; -C (camelCaseTypes) was previously
            // echoed here by mistake.
            commands.push('-c')
        }
        if (this.config.camelCaseTypes) {
            commands.push('-C')
        }
        if (this.config.tables?.length > 0) {
            commands.push('-t', this.config.tables.join(' '))
        }
        if (this.config.schema) {
            commands.push(`-s ${this.config.schema}`)
        }
        return commands.join(' ')
    }

    // Emit TS enums instead of string-literal union types.
    public get enums () {
        return this.config.enums
    }

    public get tables () {
        return this.config.tables
    }

    public get schema () {
        return this.config.schema
    }

    // Whether to emit the AUTO-GENERATED banner.
    public get writeHeader () {
        return this.config.header
    }

    // Module path that custom jsonb types are imported from.
    public get typesFile () {
        return this.config.typesFile
    }

    public get throwOnMissingType () {
        return this.config.throwOnMissingType
    }

    // Pascal-cases type names when either casing flag is set.
    public transformTypeName (typename: string) {
        return (this.config.camelCase || this.config.camelCaseTypes) ? camelCase(typename, { pascalCase: true }) : typename
    }

    // Camel-cases column names only in full camelCase mode; camelCaseTypes
    // deliberately leaves column names matching the SQL schema.
    public transformColumnName (columnName: string) {
        return this.config.camelCase ? camelCase(columnName) : columnName
    }
}


================================================
FILE: src/generator.ts
================================================
import { Config, ConfigValues } from './config'
import { version } from '../package.json'
import { Database } from './schema-interfaces'
import camelcase from 'camelcase'
import { EnumTypes, TableDefinition } from './schema-interfaces'

// Renders the AUTO-GENERATED banner placed at the top of the emitted file,
// including a reconstructed CLI command for reproducibility.
const generateHeader = (config: Config, db: Database): string => {
    const generatedAt = new Date().toUTCString()
    const cliCommand = config.getCLICommand(db.getConnectionString())
    return `
/**
 * AUTO-GENERATED FILE @ ${generatedAt} - DO NOT EDIT!
 *
 * This file was automatically generated by schemats v.${version}
 * $ ${cliCommand}
 *
 */`
}

// Identifiers that would clash with TS built-ins/reserved words when emitted
// as generated type or property names.
const reservedJSNames = new Set(['string', 'number', 'package'])
// Suffix reserved identifiers with '_' so the generated code stays valid.
// Bug fix: previously tested the literal 'name' (never in the set) instead of
// the argument, so reserved identifiers were never escaped.
const normalizeName = (name: string): string => reservedJSNames.has(name) ? `${name}_` : name

export function generateEnum(config: Config, enumObject: EnumTypes): string[] {
    const enumStrings = []
    for (let enumNameRaw in enumObject) {
        const enumName = config.transformTypeName(enumNameRaw)
        if (config.enums) {
            enumStrings.push(`export enum ${enumName} {\n${enumObject[enumNameRaw].map((v: string) => `  '${camelcase(v, { pascalCase: true })}' = '${v}'`).join(',\n')} \n}`)
        } else {
            enumStrings.push(`export type ${enumName} = ${enumObject[enumNameRaw].map((v: string) => `'${v}'`).join(' | ')}`)
        }
    }
    return enumStrings
}

// Builds the `export interface X { ... }` declaration for one table.
// Nullable columns are unioned with `null` and, when config.config.optional
// is set, also marked optional with `?`.
export function generateTableInterface(config: Config, tableNameRaw: string, tableDefinition: TableDefinition) {
    const tableName = config.transformTypeName(tableNameRaw)
    const members = Object.entries(tableDefinition).map(([name, { tsType, nullable, isArray }]) => {
        const columnName = config.transformColumnName(name)
        const optionalMark = nullable && config.config.optional ? '?' : ''
        const arrayMark = isArray ? '[]' : ''
        const nullMark = nullable ? ' | null' : ''
        return `\n  ${normalizeName(columnName)}${optionalMark}: ${tsType}${arrayMark}${nullMark}`
    }).join('')
    return `export interface ${normalizeName(tableName)} { ${members} \n}`
}

// Fetches one table's column metadata (recording any custom jsonb type names
// into `types`) and renders its interface declaration.
export const typescriptOfTable = async (config: Config, db: Database, schema: string, table: string, types: Set<string>) => {
    const definition = await db.getTableTypes(schema, table, types)
    return generateTableInterface(config, table, definition)
}

// Renders the `Tables` lookup interface mapping raw table names to their
// generated interface names (consumed by postgres-typed).
export const typescriptLookupForTables = (config: Config, tables: string[]): string => {
    const entries = tables.map((table) => `${table}: ${config.transformTypeName(table)}`)
    return `export interface Tables {\n  ${entries.join(',\n  ')}\n}`
}

// Top-level generation pipeline. Resolves the schema and table list, renders
// enums and table interfaces (collecting custom jsonb type names on the way),
// then assembles the output sections in order: header, custom-type import,
// enums, interfaces, Tables lookup, CustomTypes union.
export const typescriptOfSchema = async (config: Config, db: Database): Promise<string> => {
    const schema = config.schema || await db.getDefaultSchema()
    const tables = config.tables || await db.getSchemaTables(schema)

    const enumDeclarations = generateEnum(config, await db.getEnums(schema))
    const jsonTypesToImport = new Set<string>()
    const interfaces = await Promise.all(tables.map(table => typescriptOfTable(config, db, schema, table, jsonTypesToImport)))

    const sections: string[] = []
    if (config.writeHeader) {
        sections.push(generateHeader(config, db))
    }
    if (config.typesFile && jsonTypesToImport.size) {
        sections.push(`import { ${Array.from(jsonTypesToImport).join(', ')} } from '${config.typesFile}'\n\n`)
    }
    sections.push(enumDeclarations.join('\n\n'), interfaces.join('\n\n'))
    sections.push(typescriptLookupForTables(config, tables))
    if (jsonTypesToImport.size) {
        sections.push(`export type CustomTypes = ${Array.from(jsonTypesToImport).join(' | ')}`)
    }

    return sections.join('\n\n')
}

export {
    Config,
    ConfigValues
}


================================================
FILE: src/schema-interfaces.ts
================================================
export interface ForeignKey {
    table: string;
    column: string;
}

export interface ColumnDefinition {
    udtName: string,
    nullable: boolean,
    tsType?: string
    isArray: boolean
    comment?: string;
    foreignKey?: ForeignKey
    hasDefault: boolean
}

export interface Metadata {
    schema: string;
    enumTypes: any
    foreignKeys: Record<string, { [columnName: string]: ForeignKey }>
    tableToKeys: Record<string, string>
    columnComments: Record<string, Record<string, string>>
    tableComments: Record<string, string>
}

export type EnumTypes = Record<string, string[]>
export type TableDefinition = Record<string, ColumnDefinition>

/**
 * Engine-agnostic reflection interface implemented by the Postgres and MySQL
 * adapters. Lifecycle: construct, await isReady(), reflect, then close().
 */
export interface Database {
    // Server version string; populated by isReady() where the adapter supports it.
    version: string
    getConnectionString: () => string
    // Establishes the connection; must resolve before any reflection call.
    isReady(): Promise<void>
    close(): Promise<void>
    // Schema used when the config does not name one (e.g. 'public').
    getDefaultSchema(): string
    getEnums(schemaName: string): Promise<EnumTypes>
    // Raw column metadata for one table, keyed by column name (no tsType yet).
    getTableDefinition(schemaName: string, tableName: string): Promise<TableDefinition>
    // Like getTableDefinition but with tsType resolved; custom `@type` names
    // discovered in column comments are added to `types`.
    getTableTypes(schemaName: string, tableName: string, types: Set<string>): Promise<TableDefinition>
    getSchemaTables(schemaName: string): Promise<string[]>
}


================================================
FILE: src/schema-mysql.ts
================================================
import { Config } from './generator'
import { TableDefinition, Database, EnumTypes } from './schema-interfaces'
import { Connection, createConnection, RowDataPacket } from 'mysql2/promise'

// uses the type mappings from https://github.com/mysqljs/ where sensible
/**
 * Assigns a TypeScript type (`tsType`) to every column of a MySQL table
 * definition, in place, based on the column's udtName.
 *
 * @param config             generation options (bigint handling, naming, strictness)
 * @param tableDefinition    raw column metadata from getTableDefinition
 * @param enumTypes          synthetic enum names present in the schema
 * @param customTypes        out-param: collects `@type {...}` names found in comments
 * @param columnDescriptions column name -> column comment text
 * @returns the same definition object with tsType filled in for each column
 */
const mapTableDefinitionToType = (config: Config, tableDefinition: TableDefinition, enumTypes: Set<string>, customTypes: Set<string>, columnDescriptions: Record<string, string>): TableDefinition => {
    return Object.entries(tableDefinition).reduce((result, [columnName, column]) => {
        switch (column.udtName) {
            case 'char':
            case 'varchar':
            case 'text':
            case 'tinytext':
            case 'mediumtext':
            case 'longtext':
            case 'time':
            case 'geometry':
            case 'set':
            case 'enum':
                // keep set and enum defaulted to string if custom type not mapped
                column.tsType = 'string'
                break
            case 'bigint':
                // bigint exceeds Number.MAX_SAFE_INTEGER; expose as native bigint
                // only when the config opts in, otherwise keep the legacy number.
                if (config.config.bigint) {
                    column.tsType = 'bigint'
                } else {
                    column.tsType = 'number'
                }
                break
            case 'integer':
            case 'int':
            case 'smallint':
            case 'mediumint':
            case 'double':
            case 'decimal':
            case 'numeric':
            case 'float':
            case 'year':
                column.tsType = 'number'
                break
            case 'tinyint':
                column.tsType = 'boolean'
                break
            case 'json':
                column.tsType = 'unknown'
                // A column comment may carry `@type {SomeTsType}` to override the
                // JSON type; record the name so it can be imported/unioned later.
                if (columnDescriptions[columnName]) {
                    const type = /@type \{([^}]+)\}/.exec(columnDescriptions[columnName])
                    if (type) {
                        column.tsType = type[1].trim()
                        customTypes.add(column.tsType)
                    }
                }
                break
            case 'date':
            case 'datetime':
            case 'timestamp':
                column.tsType = 'Date'
                break
            case 'tinyblob':
            case 'mediumblob':
            case 'longblob':
            case 'blob':
            case 'binary':
            case 'varbinary':
            case 'bit':
                column.tsType = 'Buffer'
                break
            default:
                if (enumTypes.has(column.udtName)) {
                    column.tsType = config.transformTypeName(column.udtName)
                    break
                } else {
                    // Balanced brackets in the message, and the same string is
                    // reused for both the throw and the log.
                    const warning = `Type [${column.udtName}] has been mapped to [any] because no specific type has been found.`
                    if (config.throwOnMissingType) {
                        throw new Error(warning)
                    }
                    console.log(warning)
                    column.tsType = 'any'
                    break
                }
        }
        result[columnName] = column
        return result
    }, {} as TableDefinition)
}

// Extracts the quoted values from a raw MySQL column type such as
// `enum('a','b')` or `set('a','b')`.
const parseMysqlEnumeration = (mysqlEnum: string): string[] => {
    const inner = mysqlEnum
        .replace(/^(enum|set)\('/i, '')
        .replace(/'\)$/, '')
    return inner.split(`','`)
}

// Builds the synthetic enum type name for a column, e.g.
// ('enum', 'status') -> 'enum_status'. MySQL enums are column-scoped, so the
// name is derived rather than read from the catalog.
const getEnumNameFromColumn = (dataType: string, columnName: string): string => {
    return [dataType, columnName].join('_')
}

/**
 * MySQL implementation of the `Database` reflection interface, backed by
 * mysql2/promise. Construct, then await isReady() before any other call.
 */
export class MysqlDatabase implements Database {
    public version: string = ''
    // Assigned in isReady(); every query goes through this connection.
    private db!: Connection

    constructor (private config: Config, public connectionString: string) {
    }

    /** Opens the underlying connection. Must resolve before any reflection call. */
    public async isReady(): Promise<void> {
        this.db = await createConnection(this.connectionString)
    }

    /** Closes the connection immediately (destroy sends no graceful quit packet). */
    public async close(): Promise<void> {
        await this.db.destroy()
    }

    public getConnectionString (): string {
        return this.connectionString
    }

    public getDefaultSchema (): string {
        // NOTE(review): MySQL has no built-in 'public' schema (schemas are
        // databases); kept for interface parity with the Postgres adapter.
        // Verify that callers pass an explicit schema when targeting MySQL.
        return 'public'
    }

    /**
     * Reflects every enum/set column in the schema. Names are synthesized as
     * `<dataType>_<columnName>` because MySQL enums are column-scoped, not
     * named types. Throws if two columns yield the same synthetic name with
     * different value lists.
     */
    public async getEnums(schema: string): Promise<EnumTypes> {
        const rawEnumRecords = await this.query<{ COLUMN_NAME: string, COLUMN_TYPE: string, DATA_TYPE: string }>(`
            SELECT COLUMN_NAME, COLUMN_TYPE, DATA_TYPE
            FROM information_schema.columns
            WHERE data_type IN ('enum', 'set') and table_schema = ?
        `, [schema])
        return rawEnumRecords.reduce((result, { COLUMN_NAME, COLUMN_TYPE, DATA_TYPE }) => {
            const enumName = getEnumNameFromColumn(DATA_TYPE, COLUMN_NAME)
            const enumValues = parseMysqlEnumeration(COLUMN_TYPE)
            if (result[enumName] && JSON.stringify(result[enumName]) !== JSON.stringify(enumValues)) {
                throw new Error(
                    `Multiple enums with the same name and contradicting types were found: ${COLUMN_NAME}: ${JSON.stringify(result[enumName])} and ${JSON.stringify(enumValues)}`
                )
            }
            result[enumName] = enumValues
            return result
        }, {} as EnumTypes)
    }

    /** Reflects raw column metadata (type, nullability, default) for one table. */
    public async getTableDefinition (tableSchema: string, tableName: string): Promise<TableDefinition> {
        const tableColumns = await this.query<{ COLUMN_NAME: string, DATA_TYPE: string, IS_NULLABLE: string, COLUMN_DEFAULT: string }>(`
            SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_DEFAULT
            FROM information_schema.columns
            WHERE table_name = ? and table_schema = ?`,
            [tableName, tableSchema]
        )
        return tableColumns.reduce((result, schemaItem) => {
            const columnName = schemaItem.COLUMN_NAME
            const dataType = schemaItem.DATA_TYPE
            result[columnName] = {
                // enum/set columns get a synthetic per-column type name so they
                // can be matched against getEnums() output later.
                udtName: /^(enum|set)$/i.test(dataType) ? getEnumNameFromColumn(dataType, columnName) : dataType,
                nullable: schemaItem.IS_NULLABLE === 'YES',
                isArray: false,
                hasDefault: schemaItem.COLUMN_DEFAULT !== null
            }
            return result
        }, {} as TableDefinition)
    }

    /** Reflects one table and resolves each column's TypeScript type. */
    public async getTableTypes (tableSchema: string, tableName: string, customTypes: Set<string>) {
        const enumTypes = await this.getEnums(tableSchema)
        const columnComments = await this.getColumnComments(tableSchema, tableName)
        return mapTableDefinitionToType(
            this.config, 
            await this.getTableDefinition(tableSchema, tableName), 
            new Set(Object.keys(enumTypes)), 
            customTypes,
            columnComments
        )
    }

    /** Lists the distinct table names in a schema. */
    public async getSchemaTables (schemaName: string): Promise<string[]> {
        const schemaTables = await this.query<{ TABLE_NAME: string }>(`
            SELECT TABLE_NAME
            FROM information_schema.columns
            WHERE table_schema = ?
            GROUP BY table_name
        `,
            [schemaName]
        )
        return schemaTables.map((schemaItem: { TABLE_NAME: string }) => schemaItem.TABLE_NAME)
    }

    /**
     * Maps column name -> column comment for one table. Comments may carry
     * `@type {TsType}` annotations that override the generated JSON type.
     */
    public async getColumnComments(schemaName: string, tableName: string) {
        // See https://stackoverflow.com/a/4946306/388951
        // `column_comment` is aliased to `description` so the destructuring in
        // the reducer below actually receives it (previously it was selected
        // unaliased and every description came back undefined).
        const commentsResult = await this.query<{
            column_name: string;
            description: string;
        }>(
            `
            select column_name, column_comment as description
            from information_schema.COLUMNS
            where table_schema = ? and table_name = ?;
            `,
            [schemaName, tableName],
        );
        return commentsResult.reduce((result, { column_name, description }) => {
            result[column_name] = description
            return result
        }, {} as Record<string, string>)
    }

    /** Runs a parameterized query and returns the rows cast to T. */
    private async query <T>(query: string, args: any[]): Promise<T[]> {
        const [rows] = await this.db.query<RowDataPacket[]>(query, args)
        return rows as unknown as T[]
    }
}


================================================
FILE: src/schema-postgres.ts
================================================
import { Client } from 'pg'
import { Config } from './generator'
import { TableDefinition, Database, EnumTypes } from './schema-interfaces'

/**
 * Assigns a TypeScript type (`tsType`) to every column of a Postgres table
 * definition, in place, based on the column's udtName.
 *
 * @param config             generation options (bigint handling, naming, strictness)
 * @param tableDefinition    raw column metadata from getTableDefinition
 * @param enumTypes          named enum types present in the schema
 * @param customTypes        out-param: collects `@type {...}` names found in comments
 * @param columnDescriptions column name -> column comment text
 * @returns the same definition object with tsType filled in for each column
 */
const mapPostgresTableDefinitionToType = (config: Config, tableDefinition: TableDefinition, enumTypes: Set<string>, customTypes: Set<string>, columnDescriptions: Record<string, string>): TableDefinition => {
    return Object.entries(tableDefinition).reduce((result, [columnName, column]) => {
        switch (column.udtName) {
            case 'bpchar':
            case 'char':
            case 'varchar':
            case 'text':
            case 'citext':
            case 'uuid':
            case 'bytea':
            case 'inet':
            case 'time':
            case 'timetz':
            case 'interval':
            case 'tsvector':
            case 'mol':
            case 'bfp':
            case 'bit':
            case 'name':
                column.tsType = 'string'
                break
            case 'int8':
                // int8 exceeds Number.MAX_SAFE_INTEGER; expose as native bigint
                // only when the config opts in, otherwise keep the legacy number.
                if (config.config.bigint) {
                    column.tsType = 'bigint'
                } else {
                    column.tsType = 'number'
                }
                break
            case 'int2':
            case 'int4':
            case 'float4':
            case 'float8':
            case 'numeric':
            case 'money':
            case 'oid':
                column.tsType = 'number'
                break
            case 'bool':
                column.tsType = 'boolean'
                break
            case 'json':
            case 'jsonb':
                column.tsType = 'unknown'
                // A column comment may carry `@type {SomeTsType}` to override the
                // JSON type; record the name so it can be imported/unioned later.
                if (columnDescriptions[columnName]) {
                    const type = /@type \{([^}]+)\}/.exec(columnDescriptions[columnName])
                    if (type) {
                        column.tsType = type[1].trim()
                        customTypes.add(column.tsType)
                    }
                }
                break
            case 'date':
            case 'timestamp':
            case 'timestamptz':
                column.tsType = 'Date'
                break
            case 'point':
                column.tsType = '{ x: number, y: number }'
                break
            default:
                if (enumTypes.has(column.udtName)) {
                    column.tsType = config.transformTypeName(column.udtName)
                    break
                } else {
                    // Balanced brackets in the message, and the same string is
                    // reused for both the throw and the log.
                    const warning = `Type [${column.udtName}] has been mapped to [any] because no specific type has been found.`
                    if (config.throwOnMissingType) {
                        throw new Error(warning)
                    }
                    console.log(warning)
                    column.tsType = 'any'
                    break
                }
        }
        result[columnName] = column
        return result
    }, {} as TableDefinition)
}

/**
 * PostgreSQL implementation of the `Database` reflection interface, backed by
 * the `pg` Client. Construct, then await isReady() before any other call.
 */
export class PostgresDatabase implements Database {
    private db: Client
    public version: string = ''

    constructor(private config: Config, private connectionString?: string) {
        this.db = new Client(connectionString)
    }

    /**
     * Connects and records the server version. The stored connection string is
     * rebuilt with placeholder credentials ('username:password') so it can be
     * echoed — e.g. into the generated file header — without leaking secrets.
     */
    public async isReady() {
        await this.db.connect()
        this.connectionString = `postgres://username:password@${this.db.host}:${this.db.port}/${this.db.database}`
        const result = await this.db.query<{ version: string }>(`SELECT version()`)
        this.version = result.rows[0].version
    }

    /** Gracefully ends the client connection. */
    public async close() {
        await this.db.end()
    }

    /** Returns the credential-masked connection string set by isReady(). */
    public getConnectionString(): string {
        return this.connectionString!
    }

    public getDefaultSchema(): string {
        return 'public'
    }

    /** Maps each named enum type in the schema to its list of labels. */
    public async getEnums(schema: string): Promise<EnumTypes> {
        const results = await this.db.query<{ name: string, value: string }>(`
            SELECT n.nspname as schema, t.typname as name, e.enumlabel as value
            FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
            WHERE n.nspname = $1
        `, [schema])
        // One row per (enum, label); group labels under their enum name.
        return results.rows.reduce((result, { name, value }) => {
            const values = result[name] || []
            values.push(value)
            result[name] = values
            return result
        }, {} as EnumTypes)
    }

    /** Reflects raw column metadata (type, nullability, array-ness, default) for one table. */
    public async getTableDefinition(tableSchema: string, tableName: string) {
        const result = await this.db.query<{ column_name: string, udt_name: string, is_nullable: string, has_default: boolean }>(`
            SELECT column_name, udt_name, is_nullable, column_default IS NOT NULL as has_default
            FROM information_schema.columns
            WHERE table_name = $1 and table_schema = $2
        `, [tableName, tableSchema])
        if (result.rows.length === 0) {
            console.error(`Missing table: ${tableSchema}.${tableName}`)
        }
        // https://www.developerfiles.com/adding-and-retrieving-comments-on-postgresql-tables/
        // Accumulator renamed so it no longer shadows the query result above.
        return result.rows.reduce((definition, { column_name, udt_name, is_nullable, has_default }) => {
            definition[column_name] = {
                // Postgres prefixes array types' udt_name with '_' (e.g. _int4 = int4[]).
                udtName: udt_name.replace(/^_/, ''),
                nullable: is_nullable === 'YES',
                isArray: udt_name.startsWith('_'),
                hasDefault: has_default,
            }
            return definition
        }, {} as TableDefinition)
    }

    /** Reflects one table and resolves each column's TypeScript type. */
    public async getTableTypes(tableSchema: string, tableName: string, customTypes: Set<string>) {
        const enumTypes = await this.getEnums(tableSchema)
        const columnComments = await this.getColumnComments(tableSchema, tableName)
        return mapPostgresTableDefinitionToType(
            this.config, 
            await this.getTableDefinition(tableSchema, tableName), 
            new Set(Object.keys(enumTypes)), 
            customTypes,
            columnComments
        )
    }

    /** Lists the distinct table names in a schema. */
    public async getSchemaTables(schemaName: string): Promise<string[]> {
        const result = await this.db.query<{ table_name: string }>(`
            SELECT table_name
            FROM information_schema.columns
            WHERE table_schema = $1
            GROUP BY table_name
        `, [schemaName])
        if (result.rows.length === 0) {
            console.error(`Missing schema: ${schemaName}`)
        }
        return result.rows.map(({ table_name }) => table_name)
    }

    /**
        public async getPrimaryKeys(schemaName: string) {
        interface PrimaryKeyDefinition {
            table_name: string;
            constraint_name: string;
            ordinal_position: number;
            key_column: string;
        }

        // https://dataedo.com/kb/query/postgresql/list-all-primary-keys-and-their-columns
        const keysResult: PrimaryKeyDefinition[] = await this.db.query(
            `
                SELECT
                    kcu.table_name,
                    tco.constraint_name,
                    kcu.ordinal_position as position,
                    kcu.column_name as key_column
                FROM information_schema.table_constraints tco
                JOIN information_schema.key_column_usage kcu
                    on kcu.constraint_name = tco.constraint_name
                    and kcu.constraint_schema = tco.constraint_schema
                    and kcu.constraint_name = tco.constraint_name
                WHERE tco.constraint_type = 'PRIMARY KEY'
                  AND kcu.table_schema = $1
                ORDER BY kcu.table_name,
                         position;
            `,
            [schemaName],
        );

        return []
    }
    **/

    /**
     * Maps column name -> column comment for one table. Comments may carry
     * `@type {TsType}` annotations that override the generated JSON type.
     */
    public async getColumnComments(schemaName: string, tableName: string) {
        // See https://stackoverflow.com/a/4946306/388951
        const commentsResult = await this.db.query<{
            table_name: string;
            column_name: string;
            description: string;
        }>(
            `
                SELECT
                    c.table_name,
                    c.column_name,
                    pgd.description
                FROM pg_catalog.pg_statio_all_tables AS st
                INNER JOIN pg_catalog.pg_description pgd ON (pgd.objoid=st.relid)
                INNER JOIN information_schema.columns c ON (
                    pgd.objsubid=c.ordinal_position AND
                    c.table_schema=st.schemaname AND
                    c.table_name=st.relname
                )
                WHERE c.table_schema = $1 and c.table_name = $2
            `,
            [schemaName, tableName],
        );
        return commentsResult.rows.reduce((result, { column_name, description }) => {
            result[column_name] = description
            return result
        }, {} as Record<string, string>)
    }

        /**
    public async getTableComments(schemaName: string) {
        interface TableComment {
            table_name: string;
            description: string;
        }
        const comments: TableComment[] = await this.db.query(
            `
                SELECT
                    t.table_name,
                    pgd.description
                FROM pg_catalog.pg_statio_all_tables AS st
                INNER JOIN pg_catalog.pg_description pgd ON (pgd.objoid=st.relid)
                INNER JOIN information_schema.tables t ON (
                    t.table_schema=st.schemaname AND
                    t.table_name=st.relname
                )
                WHERE pgd.objsubid = 0
                  AND t.table_schema = $1;
            `,
            [schemaName],
        );

        return _.fromPairs(comments.map((c) => [c.table_name, c.description]));
    }

    async getForeignKeys(schemaName: string) {
        interface ForeignKey {
            table_name: string;
            column_name: string;
            foreign_table_name: string;
            foreign_column_name: string;
            conname: string;
        }
        // See https://stackoverflow.com/a/10950402/388951
        const fkeys: ForeignKey[] = await this.db.query(
            `
            SELECT
                cl2.relname AS table_name,
                att2.attname AS column_name,
                cl.relname AS foreign_table_name,
                att.attname AS foreign_column_name,
                conname
            FROM
                (SELECT
                    unnest(con1.conkey) AS "parent",
                    unnest(con1.confkey) AS "child",
                    con1.confrelid,
                    con1.conrelid,
                    con1.conname
                FROM pg_class cl
                JOIN pg_namespace ns ON cl.relnamespace = ns.oid
                JOIN pg_constraint con1 ON con1.conrelid = cl.oid
                WHERE ns.nspname = $1 AND con1.contype = 'f'
                ) con
            JOIN pg_attribute att ON att.attrelid = con.confrelid and att.attnum = con.child
            JOIN pg_class cl ON cl.oid = con.confrelid
            JOIN pg_class cl2 ON cl2.oid = con.conrelid
            JOIN pg_attribute att2 ON att2.attrelid = con.conrelid AND att2.attnum = con.parent
            `,
            [schemaName],
        );

        // Multi-column foreign keys are harder to model.
        // To get consistent outputs, just ignore them for now.
        const countKey = (fk: ForeignKey) => `${fk.table_name},${fk.conname}`;
        const colCounts = _.countBy(fkeys, countKey);

        return _(fkeys)
            .filter((c) => colCounts[countKey(c)] < 2)
            .groupBy((c) => c.table_name)
            .mapValues((tks) =>
                _.fromPairs(
                    tks.map((ck) => [
                        ck.column_name,
                        { table: ck.foreign_table_name, column: ck.foreign_column_name },
                    ]),
                ),
            )
            .value();
    }

    async getMeta(schemaName: string): Promise<Metadata> {
        if (this.metadata && schemaName === this.metadata.schema) {
            return this.metadata;
        }

        const [
            enumTypes,
            tableToKeys,
            foreignKeys,
            columnComments,
            tableComments,
        ] = await Promise.all([
            this.getEnumTypes(),
            this.getPrimaryKeys(schemaName),
            this.getForeignKeys(schemaName),
            this.getColumnComments(schemaName),
            this.getTableComments(schemaName),
        ]);

        const metadata: Metadata = {
            schema: schemaName,
            enumTypes,
            tableToKeys,
            foreignKeys,
            columnComments,
            tableComments,
        };

        this.metadata = metadata;
        return metadata;
    }
    */
}


================================================
FILE: tsconfig.json
================================================
{
    "compilerOptions": {
        "module": "commonjs",
        "target": "es5",
        "lib": ["es2020"],
        "strict": true,
        "noImplicitAny": true,
        "declaration": true,
        "strictNullChecks": true,
        "sourceMap": true,
        "outDir": "dist",
        "esModuleInterop": true,
        "resolveJsonModule": true
    },
    "exclude": ["node_modules"]
}
Download .txt
gitextract_tv4k2ze4/

├── .github/
│   └── workflows/
│       ├── main.yml
│       └── publish.yml
├── .gitignore
├── .npmignore
├── .yarnrc.yml
├── CHANGELOG.md
├── LICENSE
├── README.md
├── bin/
│   ├── schemats-mysql.ts
│   ├── schemats-postgres.ts
│   └── schemats.ts
├── example/
│   ├── create-db.ts
│   ├── db-custom-types.ts
│   ├── db-types.ts
│   └── schema.sql
├── package.json
├── src/
│   ├── config.ts
│   ├── generator.ts
│   ├── schema-interfaces.ts
│   ├── schema-mysql.ts
│   └── schema-postgres.ts
└── tsconfig.json
Download .txt
SYMBOL INDEX (53 symbols across 10 files)

FILE: bin/schemats-mysql.ts
  type Command (line 10) | type Command = commander.Command

FILE: bin/schemats-postgres.ts
  type Command (line 10) | type Command = commander.Command

FILE: example/db-custom-types.ts
  type RandomPetFacts (line 1) | type RandomPetFacts = Record<string, string>

FILE: example/db-types.ts
  type Animal (line 14) | enum Animal {
  type User (line 19) | interface User {
  type Pet (line 24) | interface Pet {
  type Tables (line 36) | interface Tables {
  type CustomTypes (line 41) | type CustomTypes = RandomPetFacts

FILE: example/schema.sql
  type "pet_store" (line 11) | CREATE TABLE "pet_store"."user" (
  type "pet_store" (line 16) | CREATE TABLE "pet_store"."pet" (

FILE: src/config.ts
  type ConfigValues (line 3) | interface ConfigValues {
  class Config (line 16) | class Config {
    method constructor (line 17) | constructor (public config: Partial<ConfigValues> & Pick<ConfigValues,...
    method getCLICommand (line 29) | public getCLICommand (dbConnection: string): string {
    method enums (line 43) | public get enums () {
    method tables (line 47) | public get tables () {
    method schema (line 51) | public get schema () {
    method writeHeader (line 55) | public get writeHeader () {
    method typesFile (line 59) | public get typesFile () {
    method throwOnMissingType (line 63) | public get throwOnMissingType () {
    method transformTypeName (line 67) | public transformTypeName (typename: string) {
    method transformColumnName (line 71) | public transformColumnName (columnName: string) {

FILE: src/generator.ts
  function generateEnum (line 21) | function generateEnum(config: Config, enumObject: EnumTypes): string[] {
  function generateTableInterface (line 34) | function generateTableInterface(config: Config, tableNameRaw: string, ta...

FILE: src/schema-interfaces.ts
  type ForeignKey (line 1) | interface ForeignKey {
  type ColumnDefinition (line 6) | interface ColumnDefinition {
  type Metadata (line 16) | interface Metadata {
  type EnumTypes (line 25) | type EnumTypes = Record<string, string[]>
  type TableDefinition (line 26) | type TableDefinition = Record<string, ColumnDefinition>
  type Database (line 28) | interface Database {

FILE: src/schema-mysql.ts
  class MysqlDatabase (line 94) | class MysqlDatabase implements Database {
    method constructor (line 98) | constructor (private config: Config, public connectionString: string) {
    method isReady (line 101) | public async isReady(): Promise<void> {
    method close (line 105) | public async close(): Promise<void> {
    method getConnectionString (line 109) | public getConnectionString (): string {
    method getDefaultSchema (line 113) | public getDefaultSchema (): string {
    method getEnums (line 117) | public async getEnums(schema: string): Promise<EnumTypes> {
    method getTableDefinition (line 136) | public async getTableDefinition (tableSchema: string, tableName: strin...
    method getTableTypes (line 157) | public async getTableTypes (tableSchema: string, tableName: string, cu...
    method getSchemaTables (line 169) | public async getSchemaTables (schemaName: string): Promise<string[]> {
    method getColumnComments (line 181) | public async getColumnComments(schemaName: string, tableName: string) {
    method query (line 201) | private async query <T>(query: string, args: any[]): Promise<T[]> {

FILE: src/schema-postgres.ts
  class PostgresDatabase (line 83) | class PostgresDatabase implements Database {
    method constructor (line 87) | constructor(private config: Config, private connectionString?: string) {
    method isReady (line 91) | public async isReady() {
    method close (line 98) | public async close() {
    method getConnectionString (line 102) | public getConnectionString(): string {
    method getDefaultSchema (line 106) | public getDefaultSchema(): string {
    method getEnums (line 110) | public async getEnums(schema: string): Promise<EnumTypes> {
    method getTableDefinition (line 126) | public async getTableDefinition(tableSchema: string, tableName: string) {
    method getTableTypes (line 147) | public async getTableTypes(tableSchema: string, tableName: string, cus...
    method getSchemaTables (line 159) | public async getSchemaTables(schemaName: string): Promise<string[]> {
    method getColumnComments (line 206) | public async getColumnComments(schemaName: string, tableName: string) {
Condensed preview — 22 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (53K chars).
[
  {
    "path": ".github/workflows/main.yml",
    "chars": 756,
    "preview": "name: Build\nrun-name: Building changes\non: [push]\njobs:\n  main:\n    runs-on: ubuntu-latest\n    services:\n      postgres:"
  },
  {
    "path": ".github/workflows/publish.yml",
    "chars": 478,
    "preview": "name: Publish Package to npmjs\non:\n  release:\n    types: [published]\njobs:\n  build:\n    runs-on: ubuntu-latest\n    steps"
  },
  {
    "path": ".gitignore",
    "chars": 1865,
    "preview": "# Build\n.build\n\n# Logs\nlogs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\nlerna-debug.log*\n.pnpm-debug.log*\n\n# Di"
  },
  {
    "path": ".npmignore",
    "chars": 29,
    "preview": ".npmrc\nnode_modules\n.circleci"
  },
  {
    "path": ".yarnrc.yml",
    "chars": 25,
    "preview": "nodeLinker: node-modules\n"
  },
  {
    "path": "CHANGELOG.md",
    "chars": 3119,
    "preview": "## [1.0.8] - 2022.03.20\n\nmisc: updating packages\nci: switching to github actions\n\n## [1.0.7] - 2022.11.27\n\nfeat: add --n"
  },
  {
    "path": "LICENSE",
    "chars": 1095,
    "preview": "MIT License\n\nCopyright (c) 2021 Vlandor Ltd\nCopyright (c) 2016 SweetIQ\n\nPermission is hereby granted, free of charge, to"
  },
  {
    "path": "README.md",
    "chars": 6925,
    "preview": "# Schemats\n\nBefore anything, I would like to give a massive thank you to [sweetiq](https://www.npmjs.com/package/schemat"
  },
  {
    "path": "bin/schemats-mysql.ts",
    "chars": 1975,
    "preview": "import * as commander from 'commander'\nimport { Config, typescriptOfSchema } from '../src/generator'\nimport { MysqlDatab"
  },
  {
    "path": "bin/schemats-postgres.ts",
    "chars": 2349,
    "preview": "import * as commander from 'commander'\nimport { Config, typescriptOfSchema } from '../src/generator'\nimport { PostgresDa"
  },
  {
    "path": "bin/schemats.ts",
    "chars": 350,
    "preview": "#!/usr/bin/env node\nimport { version } from '../package.json'\n\nimport { Command } from 'commander'\nimport { postgres } f"
  },
  {
    "path": "example/create-db.ts",
    "chars": 781,
    "preview": "import { promises } from 'fs'\nimport { Client } from 'pg'\n\nconst createDB = async () => {\n    const db = new Client('pos"
  },
  {
    "path": "example/db-custom-types.ts",
    "chars": 51,
    "preview": "export type RandomPetFacts = Record<string, string>"
  },
  {
    "path": "example/db-types.ts",
    "chars": 780,
    "preview": "\n/**\n * AUTO-GENERATED FILE @ Wed, 20 Mar 2024 14:40:42 GMT - DO NOT EDIT!\n *\n * This file was automatically generated b"
  },
  {
    "path": "example/schema.sql",
    "chars": 685,
    "preview": "CREATE EXTENSION IF NOT EXISTS pgcrypto;\n\nDROP SCHEMA IF EXISTS \"pet_store\" CASCADE;\nCREATE SCHEMA \"pet_store\";\n\nCREATE "
  },
  {
    "path": "package.json",
    "chars": 1433,
    "preview": "{\n  \"name\": \"@vramework/schemats\",\n  \"version\": \"1.0.8\",\n  \"description\": \"Generate typescript interface definitions fro"
  },
  {
    "path": "src/config.ts",
    "chars": 1879,
    "preview": "import camelCase from 'camelcase'\n\nexport interface ConfigValues {\n    schema: string\n    tables: string[]\n    camelCase"
  },
  {
    "path": "src/generator.ts",
    "chars": 3504,
    "preview": "import { Config, ConfigValues } from './config'\nimport { version } from '../package.json'\nimport { Database } from './sc"
  },
  {
    "path": "src/schema-interfaces.ts",
    "chars": 1142,
    "preview": "export interface ForeignKey {\n    table: string;\n    column: string;\n}\n\nexport interface ColumnDefinition {\n    udtName:"
  },
  {
    "path": "src/schema-mysql.ts",
    "chars": 8006,
    "preview": "import { Config } from './generator'\nimport { TableDefinition, Database, EnumTypes } from './schema-interfaces'\nimport {"
  },
  {
    "path": "src/schema-postgres.ts",
    "chars": 12720,
    "preview": "import { Client } from 'pg'\nimport { Config } from './generator'\nimport { TableDefinition, Database, EnumTypes } from '."
  },
  {
    "path": "tsconfig.json",
    "chars": 388,
    "preview": "{\n    \"compilerOptions\": {\n        \"module\": \"commonjs\",\n        \"target\": \"es5\",\n        \"lib\": [\"es2020\"],\n        \"st"
  }
]

About this extraction

This page contains the full source code of the vramework/schemats GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 22 files (49.2 KB), approximately 12.2k tokens, and a symbol index with 53 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!