Repository: drizzle-team/drizzle-orm
Branch: main
Commit: 4aa6ecfee4b4
Files: 1373
Total size: 10.4 MB
Directory structure:
gitextract_jk7qoxoz/
├── .eslintignore
├── .eslintrc.yaml
├── .github/
│ ├── FUNDING.yml
│ ├── ISSUE_TEMPLATE/
│ │ ├── bug-template.yaml
│ │ ├── config.yml
│ │ ├── docs-template.yaml
│ │ └── feature-template.yaml
│ └── workflows/
│ ├── codeql.yml
│ ├── release-feature-branch.yaml
│ ├── release-latest.yaml
│ ├── router.yaml
│ └── unpublish-release-feature-branch.yaml
├── .gitignore
├── .markdownlint.yaml
├── .npmrc
├── .nvmrc
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── SECURITY.md
├── changelogs/
│ ├── README.md
│ ├── drizzle-arktype/
│ │ ├── 0.1.2.md
│ │ └── 0.1.3.md
│ ├── drizzle-kit/
│ │ ├── 0.23.2.md
│ │ ├── 0.24.0.md
│ │ ├── 0.24.1.md
│ │ ├── 0.24.2.md
│ │ ├── 0.25.0.md
│ │ ├── 0.26.0.md
│ │ ├── 0.26.1.md
│ │ ├── 0.26.2.md
│ │ ├── 0.27.0.md
│ │ ├── 0.27.1.md
│ │ ├── 0.27.2.md
│ │ ├── 0.28.0.md
│ │ ├── 0.28.1.md
│ │ ├── 0.29.0.md
│ │ ├── 0.29.1.md
│ │ ├── 0.30.0.md
│ │ ├── 0.30.1.md
│ │ ├── 0.30.2.md
│ │ ├── 0.30.3.md
│ │ ├── 0.30.4.md
│ │ ├── 0.30.5.md
│ │ ├── 0.30.6.md
│ │ ├── 0.31.0.md
│ │ ├── 0.31.1.md
│ │ ├── 0.31.10.md
│ │ ├── 0.31.2.md
│ │ ├── 0.31.3.md
│ │ ├── 0.31.4.md
│ │ ├── 0.31.5.md
│ │ ├── 0.31.6.md
│ │ ├── 0.31.7.md
│ │ ├── 0.31.8.md
│ │ └── 0.31.9.md
│ ├── drizzle-orm/
│ │ ├── 0.12.0-beta.23.md
│ │ ├── 0.12.0-beta.24.md
│ │ ├── 0.13.0.md
│ │ ├── 0.13.1.md
│ │ ├── 0.14.1.md
│ │ ├── 0.14.2.md
│ │ ├── 0.15.0.md
│ │ ├── 0.15.1.md
│ │ ├── 0.15.2.md
│ │ ├── 0.15.3.md
│ │ ├── 0.16.0.md
│ │ ├── 0.16.1.md
│ │ ├── 0.17.0.md
│ │ ├── 0.17.1.md
│ │ ├── 0.17.2.md
│ │ ├── 0.17.3.md
│ │ ├── 0.17.4.md
│ │ ├── 0.17.5.md
│ │ ├── 0.17.6.md
│ │ ├── 0.17.7.md
│ │ ├── 0.18.0.md
│ │ ├── 0.19.0.md
│ │ ├── 0.19.1.md
│ │ ├── 0.20.0.md
│ │ ├── 0.20.1.md
│ │ ├── 0.20.2.md
│ │ ├── 0.20.3.md
│ │ ├── 0.21.0.md
│ │ ├── 0.21.1.md
│ │ ├── 0.22.0.md
│ │ ├── 0.23.0.md
│ │ ├── 0.23.1.md
│ │ ├── 0.23.10.md
│ │ ├── 0.23.11.md
│ │ ├── 0.23.12.md
│ │ ├── 0.23.13.md
│ │ ├── 0.23.2.md
│ │ ├── 0.23.3.md
│ │ ├── 0.23.4.md
│ │ ├── 0.23.5.md
│ │ ├── 0.23.6.md
│ │ ├── 0.23.7.md
│ │ ├── 0.23.8.md
│ │ ├── 0.23.9.md
│ │ ├── 0.24.0.md
│ │ ├── 0.24.1.md
│ │ ├── 0.24.2.md
│ │ ├── 0.24.3.md
│ │ ├── 0.24.4.md
│ │ ├── 0.24.5.md
│ │ ├── 0.25.0.md
│ │ ├── 0.25.1.md
│ │ ├── 0.25.2.md
│ │ ├── 0.25.3.md
│ │ ├── 0.25.4.md
│ │ ├── 0.26.0.md
│ │ ├── 0.26.1.md
│ │ ├── 0.26.2.md
│ │ ├── 0.26.3.md
│ │ ├── 0.26.4.md
│ │ ├── 0.26.5.md
│ │ ├── 0.27.0.md
│ │ ├── 0.27.1.md
│ │ ├── 0.27.2.md
│ │ ├── 0.28.0.md
│ │ ├── 0.28.1.md
│ │ ├── 0.28.2.md
│ │ ├── 0.28.3.md
│ │ ├── 0.28.4.md
│ │ ├── 0.28.5.md
│ │ ├── 0.28.6.md
│ │ ├── 0.29.0.md
│ │ ├── 0.29.1.md
│ │ ├── 0.29.2.md
│ │ ├── 0.29.3.md
│ │ ├── 0.29.4.md
│ │ ├── 0.29.5.md
│ │ ├── 0.30.0.md
│ │ ├── 0.30.1.md
│ │ ├── 0.30.10.md
│ │ ├── 0.30.2.md
│ │ ├── 0.30.3.md
│ │ ├── 0.30.4.md
│ │ ├── 0.30.5.md
│ │ ├── 0.30.6.md
│ │ ├── 0.30.7-preview.md
│ │ ├── 0.30.7.md
│ │ ├── 0.30.8.md
│ │ ├── 0.30.9.md
│ │ ├── 0.31.0-beta.md
│ │ ├── 0.31.0.md
│ │ ├── 0.31.1.md
│ │ ├── 0.31.2.md
│ │ ├── 0.31.3.md
│ │ ├── 0.31.4.md
│ │ ├── 0.32.0-beta.md
│ │ ├── 0.32.0.md
│ │ ├── 0.32.1.md
│ │ ├── 0.32.2.md
│ │ ├── 0.33.0.md
│ │ ├── 0.34.0.md
│ │ ├── 0.34.1.md
│ │ ├── 0.35.0.md
│ │ ├── 0.35.1.md
│ │ ├── 0.35.2.md
│ │ ├── 0.35.3.md
│ │ ├── 0.36.0.md
│ │ ├── 0.36.1.md
│ │ ├── 0.36.2.md
│ │ ├── 0.36.3.md
│ │ ├── 0.36.4.md
│ │ ├── 0.37.0.md
│ │ ├── 0.38.0.md
│ │ ├── 0.38.1.md
│ │ ├── 0.38.2.md
│ │ ├── 0.38.3.md
│ │ ├── 0.38.4.md
│ │ ├── 0.39.0.md
│ │ ├── 0.39.1.md
│ │ ├── 0.39.2.md
│ │ ├── 0.39.3.md
│ │ ├── 0.40.0.md
│ │ ├── 0.40.1.md
│ │ ├── 0.41.0.md
│ │ ├── 0.42.0.md
│ │ ├── 0.43.0.md
│ │ ├── 0.43.1.md
│ │ ├── 0.44.0.md
│ │ ├── 0.44.1.md
│ │ ├── 0.44.2.md
│ │ ├── 0.44.3.md
│ │ ├── 0.44.4.md
│ │ ├── 0.44.5.md
│ │ ├── 0.44.6.md
│ │ ├── 0.44.7.md
│ │ ├── 0.45.0.md
│ │ └── 0.45.1.md
│ ├── drizzle-orm-mysql/
│ │ ├── 0.14.1.md
│ │ ├── 0.14.2.md
│ │ ├── 0.14.3.md
│ │ ├── 0.15.0.md
│ │ ├── 0.15.1.md
│ │ ├── 0.15.2.md
│ │ ├── 0.15.3.md
│ │ ├── 0.16.0.md
│ │ ├── 0.16.1.md
│ │ └── 0.16.2.md
│ ├── drizzle-orm-pg/
│ │ ├── 0.12.0-beta.40.md
│ │ ├── 0.13.0.md
│ │ ├── 0.13.1.md
│ │ ├── 0.13.2.md
│ │ ├── 0.13.3.md
│ │ ├── 0.13.4.md
│ │ ├── 0.14.0.md
│ │ ├── 0.14.1.md
│ │ ├── 0.14.2.md
│ │ ├── 0.14.3.md
│ │ ├── 0.14.4.md
│ │ ├── 0.15.0.md
│ │ ├── 0.15.1.md
│ │ ├── 0.15.2.md
│ │ ├── 0.15.3.md
│ │ ├── 0.16.0.md
│ │ ├── 0.16.1.md
│ │ ├── 0.16.2.md
│ │ └── 0.16.3.md
│ ├── drizzle-orm-sqlite/
│ │ ├── 0.12.0-beta.17.md
│ │ ├── 0.12.0-beta.18.md
│ │ ├── 0.12.0-beta.19.md
│ │ ├── 0.12.0-beta.20.md
│ │ ├── 0.12.0-beta.21.md
│ │ ├── 0.13.0.md
│ │ ├── 0.14.1.md
│ │ ├── 0.14.2.md
│ │ ├── 0.14.3.md
│ │ ├── 0.14.4.md
│ │ ├── 0.14.5.md
│ │ ├── 0.15.0.md
│ │ ├── 0.15.2.md
│ │ ├── 0.15.3.md
│ │ ├── 0.15.4.md
│ │ ├── 0.16.0.md
│ │ └── 0.16.1.md
│ ├── drizzle-seed/
│ │ ├── 0.1.1.md
│ │ ├── 0.1.2.md
│ │ ├── 0.1.3.md
│ │ ├── 0.2.1.md
│ │ ├── 0.3.0.md
│ │ └── 0.3.1.md
│ ├── drizzle-typebox/
│ │ ├── 0.1.0.md
│ │ ├── 0.1.1.md
│ │ ├── 0.2.0.md
│ │ ├── 0.2.1.md
│ │ ├── 0.3.0.md
│ │ ├── 0.3.1.md
│ │ ├── 0.3.2.md
│ │ └── 0.3.3.md
│ ├── drizzle-valibot/
│ │ ├── 0.1.0.md
│ │ ├── 0.1.1.md
│ │ ├── 0.2.0.md
│ │ ├── 0.3.0.md
│ │ ├── 0.3.1.md
│ │ ├── 0.4.0.md
│ │ ├── 0.4.1.md
│ │ └── 0.4.2.md
│ ├── drizzle-zod/
│ │ ├── 0.1.0.md
│ │ ├── 0.1.1.md
│ │ ├── 0.1.2.md
│ │ ├── 0.1.3.md
│ │ ├── 0.1.4.md
│ │ ├── 0.2.0.md
│ │ ├── 0.2.1.md
│ │ ├── 0.3.0.md
│ │ ├── 0.3.1.md
│ │ ├── 0.3.2.md
│ │ ├── 0.4.0.md
│ │ ├── 0.4.1.md
│ │ ├── 0.4.2.md
│ │ ├── 0.4.3.md
│ │ ├── 0.4.4.md
│ │ ├── 0.5.0.md
│ │ ├── 0.5.1.md
│ │ ├── 0.6.0.md
│ │ ├── 0.6.1.md
│ │ ├── 0.7.0.md
│ │ ├── 0.7.1.md
│ │ ├── 0.8.0.md
│ │ ├── 0.8.1.md
│ │ ├── 0.8.2.md
│ │ └── 0.8.3.md
│ └── eslint-plugin-drizzle/
│ ├── 0.2.0.md
│ ├── 0.2.1.md
│ ├── 0.2.2.md
│ └── 0.2.3.md
├── docs/
│ ├── custom-types.lite.md
│ ├── custom-types.md
│ ├── joins.md
│ └── table-introspect-api.md
├── dprint.json
├── drizzle-arktype/
│ ├── README.md
│ ├── benchmarks/
│ │ └── types.ts
│ ├── package.json
│ ├── rollup.config.ts
│ ├── scripts/
│ │ ├── build.ts
│ │ └── fix-imports.ts
│ ├── src/
│ │ ├── column.ts
│ │ ├── column.types.ts
│ │ ├── constants.ts
│ │ ├── index.ts
│ │ ├── schema.ts
│ │ ├── schema.types.internal.ts
│ │ ├── schema.types.ts
│ │ └── utils.ts
│ ├── tests/
│ │ ├── mysql.test.ts
│ │ ├── pg.test.ts
│ │ ├── singlestore.test.ts
│ │ ├── sqlite.test.ts
│ │ ├── tsconfig.json
│ │ └── utils.ts
│ ├── tsconfig.build.json
│ ├── tsconfig.json
│ └── vitest.config.ts
├── drizzle-kit/
│ ├── .gitignore
│ ├── README.md
│ ├── build.dev.ts
│ ├── build.ts
│ ├── imports-checker/
│ │ ├── analyze.ts
│ │ ├── checker.ts
│ │ ├── grammar/
│ │ │ ├── grammar.ohm
│ │ │ ├── grammar.ohm-bundle.d.ts
│ │ │ └── grammar.ohm-bundle.js
│ │ └── index.ts
│ ├── package.json
│ ├── patches/
│ │ └── difflib@0.2.4.patch
│ ├── src/
│ │ ├── @types/
│ │ │ └── utils.ts
│ │ ├── api.ts
│ │ ├── cli/
│ │ │ ├── commands/
│ │ │ │ ├── _es5.ts
│ │ │ │ ├── check.ts
│ │ │ │ ├── drop.ts
│ │ │ │ ├── introspect.ts
│ │ │ │ ├── libSqlPushUtils.ts
│ │ │ │ ├── migrate.ts
│ │ │ │ ├── mysqlIntrospect.ts
│ │ │ │ ├── mysqlPushUtils.ts
│ │ │ │ ├── mysqlUp.ts
│ │ │ │ ├── pgIntrospect.ts
│ │ │ │ ├── pgPushUtils.ts
│ │ │ │ ├── pgUp.ts
│ │ │ │ ├── push.ts
│ │ │ │ ├── singlestoreIntrospect.ts
│ │ │ │ ├── singlestorePushUtils.ts
│ │ │ │ ├── singlestoreUp.ts
│ │ │ │ ├── sqliteIntrospect.ts
│ │ │ │ ├── sqlitePushUtils.ts
│ │ │ │ ├── sqliteUp.ts
│ │ │ │ └── utils.ts
│ │ │ ├── connections.ts
│ │ │ ├── index.ts
│ │ │ ├── schema.ts
│ │ │ ├── selector-ui.ts
│ │ │ ├── utils.ts
│ │ │ ├── validations/
│ │ │ │ ├── cli.ts
│ │ │ │ ├── common.ts
│ │ │ │ ├── gel.ts
│ │ │ │ ├── libsql.ts
│ │ │ │ ├── mysql.ts
│ │ │ │ ├── outputs.ts
│ │ │ │ ├── postgres.ts
│ │ │ │ ├── singlestore.ts
│ │ │ │ ├── sqlite.ts
│ │ │ │ └── studio.ts
│ │ │ └── views.ts
│ │ ├── extensions/
│ │ │ ├── getTablesFilterByExtensions.ts
│ │ │ └── vector.ts
│ │ ├── global.ts
│ │ ├── index.ts
│ │ ├── introspect-gel.ts
│ │ ├── introspect-mysql.ts
│ │ ├── introspect-pg.ts
│ │ ├── introspect-singlestore.ts
│ │ ├── introspect-sqlite.ts
│ │ ├── jsonDiffer.js
│ │ ├── jsonStatements.ts
│ │ ├── loader.mjs
│ │ ├── migrationPreparator.ts
│ │ ├── schemaValidator.ts
│ │ ├── serializer/
│ │ │ ├── gelSchema.ts
│ │ │ ├── gelSerializer.ts
│ │ │ ├── index.ts
│ │ │ ├── mysqlImports.ts
│ │ │ ├── mysqlSchema.ts
│ │ │ ├── mysqlSerializer.ts
│ │ │ ├── pgImports.ts
│ │ │ ├── pgSchema.ts
│ │ │ ├── pgSerializer.ts
│ │ │ ├── singlestoreImports.ts
│ │ │ ├── singlestoreSchema.ts
│ │ │ ├── singlestoreSerializer.ts
│ │ │ ├── sqliteImports.ts
│ │ │ ├── sqliteSchema.ts
│ │ │ ├── sqliteSerializer.ts
│ │ │ ├── studio.ts
│ │ │ └── utils.ts
│ │ ├── simulator.ts
│ │ ├── snapshotsDiffer.ts
│ │ ├── sqlgenerator.ts
│ │ ├── statementCombiner.ts
│ │ ├── utils/
│ │ │ ├── certs.ts
│ │ │ └── words.ts
│ │ └── utils.ts
│ ├── tests/
│ │ ├── bin.test.ts
│ │ ├── cli/
│ │ │ ├── d1http.config.ts
│ │ │ ├── drizzle.config.ts
│ │ │ ├── durable-sqlite.config.ts
│ │ │ ├── expo.config.ts
│ │ │ ├── postgres.config.ts
│ │ │ ├── postgres2.config.ts
│ │ │ ├── schema.ts
│ │ │ └── turso.config.ts
│ │ ├── cli-export.test.ts
│ │ ├── cli-generate.test.ts
│ │ ├── cli-migrate.test.ts
│ │ ├── cli-push.test.ts
│ │ ├── common.ts
│ │ ├── indexes/
│ │ │ ├── common.ts
│ │ │ └── pg.test.ts
│ │ ├── introspect/
│ │ │ ├── gel.ext.test.ts
│ │ │ ├── gel.test.ts
│ │ │ ├── libsql.test.ts
│ │ │ ├── mysql.test.ts
│ │ │ ├── pg.test.ts
│ │ │ ├── singlestore.test.ts
│ │ │ └── sqlite.test.ts
│ │ ├── libsql-checks.test.ts
│ │ ├── libsql-statements.test.ts
│ │ ├── libsql-views.test.ts
│ │ ├── migrate/
│ │ │ ├── libsq-schema.ts
│ │ │ ├── libsql-migrate.test.ts
│ │ │ └── migrations/
│ │ │ ├── 0000_little_blizzard.sql
│ │ │ ├── 0001_nebulous_storm.sql
│ │ │ └── meta/
│ │ │ ├── 0000_snapshot.json
│ │ │ ├── 0001_snapshot.json
│ │ │ └── _journal.json
│ │ ├── mysql-checks.test.ts
│ │ ├── mysql-generated.test.ts
│ │ ├── mysql-schemas.test.ts
│ │ ├── mysql-views.test.ts
│ │ ├── mysql.test.ts
│ │ ├── pg-array.test.ts
│ │ ├── pg-checks.test.ts
│ │ ├── pg-columns.test.ts
│ │ ├── pg-enums.test.ts
│ │ ├── pg-generated.test.ts
│ │ ├── pg-identity.test.ts
│ │ ├── pg-schemas.test.ts
│ │ ├── pg-sequences.test.ts
│ │ ├── pg-tables.test.ts
│ │ ├── pg-views.test.ts
│ │ ├── push/
│ │ │ ├── common.ts
│ │ │ ├── libsql.test.ts
│ │ │ ├── mysql-push.test.ts
│ │ │ ├── mysql.test.ts
│ │ │ ├── pg.test.ts
│ │ │ ├── singlestore-push.test.ts
│ │ │ ├── singlestore.test.ts
│ │ │ └── sqlite.test.ts
│ │ ├── rls/
│ │ │ ├── pg-policy.test.ts
│ │ │ └── pg-role.test.ts
│ │ ├── schemaDiffer.ts
│ │ ├── singlestore-generated.test.ts
│ │ ├── singlestore-schemas.test.ts
│ │ ├── singlestore.test.ts
│ │ ├── sqlite-checks.test.ts
│ │ ├── sqlite-columns.test.ts
│ │ ├── sqlite-generated.test.ts
│ │ ├── sqlite-tables.test.ts
│ │ ├── sqlite-views.test.ts
│ │ ├── statements-combiner/
│ │ │ ├── libsql-statements-combiner.test.ts
│ │ │ ├── singlestore-statements-combiner.test.ts
│ │ │ └── sqlite-statements-combiner.test.ts
│ │ ├── test/
│ │ │ └── sqlite.test.ts
│ │ ├── testsinglestore.ts
│ │ ├── validations.test.ts
│ │ └── wrap-param.test.ts
│ ├── tsconfig.build.json
│ ├── tsconfig.cli-types.json
│ ├── tsconfig.json
│ └── vitest.config.ts
├── drizzle-orm/
│ ├── .madgerc
│ ├── package.json
│ ├── scripts/
│ │ ├── build.ts
│ │ └── fix-imports.ts
│ ├── src/
│ │ ├── alias.ts
│ │ ├── aws-data-api/
│ │ │ ├── common/
│ │ │ │ └── index.ts
│ │ │ └── pg/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── batch.ts
│ │ ├── better-sqlite3/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── bun-sql/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── bun-sqlite/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── cache/
│ │ │ ├── core/
│ │ │ │ ├── cache.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── types.ts
│ │ │ ├── readme.md
│ │ │ └── upstash/
│ │ │ ├── cache.ts
│ │ │ └── index.ts
│ │ ├── casing.ts
│ │ ├── column-builder.ts
│ │ ├── column.ts
│ │ ├── d1/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── durable-sqlite/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── entity.ts
│ │ ├── errors.ts
│ │ ├── expo-sqlite/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ ├── query.ts
│ │ │ └── session.ts
│ │ ├── gel/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── gel-core/
│ │ │ ├── alias.ts
│ │ │ ├── checks.ts
│ │ │ ├── columns/
│ │ │ │ ├── all.ts
│ │ │ │ ├── bigint.ts
│ │ │ │ ├── bigintT.ts
│ │ │ │ ├── boolean.ts
│ │ │ │ ├── bytes.ts
│ │ │ │ ├── common.ts
│ │ │ │ ├── custom.ts
│ │ │ │ ├── date-duration.ts
│ │ │ │ ├── date.common.ts
│ │ │ │ ├── decimal.ts
│ │ │ │ ├── double-precision.ts
│ │ │ │ ├── duration.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── int.common.ts
│ │ │ │ ├── integer.ts
│ │ │ │ ├── json.ts
│ │ │ │ ├── localdate.ts
│ │ │ │ ├── localtime.ts
│ │ │ │ ├── real.ts
│ │ │ │ ├── relative-duration.ts
│ │ │ │ ├── smallint.ts
│ │ │ │ ├── text.ts
│ │ │ │ ├── timestamp.ts
│ │ │ │ ├── timestamptz.ts
│ │ │ │ └── uuid.ts
│ │ │ ├── db.ts
│ │ │ ├── dialect.ts
│ │ │ ├── expressions.ts
│ │ │ ├── foreign-keys.ts
│ │ │ ├── index.ts
│ │ │ ├── indexes.ts
│ │ │ ├── policies.ts
│ │ │ ├── primary-keys.ts
│ │ │ ├── query-builders/
│ │ │ │ ├── count.ts
│ │ │ │ ├── delete.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── insert.ts
│ │ │ │ ├── query-builder.ts
│ │ │ │ ├── query.ts
│ │ │ │ ├── raw.ts
│ │ │ │ ├── refresh-materialized-view.ts
│ │ │ │ ├── select.ts
│ │ │ │ ├── select.types.ts
│ │ │ │ └── update.ts
│ │ │ ├── roles.ts
│ │ │ ├── schema.ts
│ │ │ ├── sequence.ts
│ │ │ ├── session.ts
│ │ │ ├── subquery.ts
│ │ │ ├── table.ts
│ │ │ ├── unique-constraint.ts
│ │ │ ├── utils.ts
│ │ │ ├── view-base.ts
│ │ │ ├── view-common.ts
│ │ │ └── view.ts
│ │ ├── index.ts
│ │ ├── knex/
│ │ │ ├── README.md
│ │ │ └── index.ts
│ │ ├── kysely/
│ │ │ ├── README.md
│ │ │ └── index.ts
│ │ ├── libsql/
│ │ │ ├── driver-core.ts
│ │ │ ├── driver.ts
│ │ │ ├── http/
│ │ │ │ └── index.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ ├── node/
│ │ │ │ └── index.ts
│ │ │ ├── session.ts
│ │ │ ├── sqlite3/
│ │ │ │ └── index.ts
│ │ │ ├── wasm/
│ │ │ │ └── index.ts
│ │ │ ├── web/
│ │ │ │ └── index.ts
│ │ │ └── ws/
│ │ │ └── index.ts
│ │ ├── logger.ts
│ │ ├── migrator.ts
│ │ ├── mysql-core/
│ │ │ ├── alias.ts
│ │ │ ├── checks.ts
│ │ │ ├── columns/
│ │ │ │ ├── all.ts
│ │ │ │ ├── bigint.ts
│ │ │ │ ├── binary.ts
│ │ │ │ ├── boolean.ts
│ │ │ │ ├── char.ts
│ │ │ │ ├── common.ts
│ │ │ │ ├── custom.ts
│ │ │ │ ├── date.common.ts
│ │ │ │ ├── date.ts
│ │ │ │ ├── datetime.ts
│ │ │ │ ├── decimal.ts
│ │ │ │ ├── double.ts
│ │ │ │ ├── enum.ts
│ │ │ │ ├── float.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── int.ts
│ │ │ │ ├── json.ts
│ │ │ │ ├── mediumint.ts
│ │ │ │ ├── real.ts
│ │ │ │ ├── serial.ts
│ │ │ │ ├── smallint.ts
│ │ │ │ ├── text.ts
│ │ │ │ ├── time.ts
│ │ │ │ ├── timestamp.ts
│ │ │ │ ├── tinyint.ts
│ │ │ │ ├── varbinary.ts
│ │ │ │ ├── varchar.ts
│ │ │ │ └── year.ts
│ │ │ ├── db.ts
│ │ │ ├── dialect.ts
│ │ │ ├── expressions.ts
│ │ │ ├── foreign-keys.ts
│ │ │ ├── index.ts
│ │ │ ├── indexes.ts
│ │ │ ├── primary-keys.ts
│ │ │ ├── query-builders/
│ │ │ │ ├── count.ts
│ │ │ │ ├── delete.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── insert.ts
│ │ │ │ ├── query-builder.ts
│ │ │ │ ├── query.ts
│ │ │ │ ├── select.ts
│ │ │ │ ├── select.types.ts
│ │ │ │ └── update.ts
│ │ │ ├── schema.ts
│ │ │ ├── session.ts
│ │ │ ├── subquery.ts
│ │ │ ├── table.ts
│ │ │ ├── unique-constraint.ts
│ │ │ ├── utils.ts
│ │ │ ├── view-base.ts
│ │ │ ├── view-common.ts
│ │ │ └── view.ts
│ │ ├── mysql-proxy/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── mysql2/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── neon/
│ │ │ ├── index.ts
│ │ │ ├── neon-auth.ts
│ │ │ └── rls.ts
│ │ ├── neon-http/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── neon-serverless/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── node-postgres/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── op-sqlite/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── operations.ts
│ │ ├── pg-core/
│ │ │ ├── alias.ts
│ │ │ ├── checks.ts
│ │ │ ├── columns/
│ │ │ │ ├── all.ts
│ │ │ │ ├── bigint.ts
│ │ │ │ ├── bigserial.ts
│ │ │ │ ├── boolean.ts
│ │ │ │ ├── char.ts
│ │ │ │ ├── cidr.ts
│ │ │ │ ├── common.ts
│ │ │ │ ├── custom.ts
│ │ │ │ ├── date.common.ts
│ │ │ │ ├── date.ts
│ │ │ │ ├── double-precision.ts
│ │ │ │ ├── enum.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── inet.ts
│ │ │ │ ├── int.common.ts
│ │ │ │ ├── integer.ts
│ │ │ │ ├── interval.ts
│ │ │ │ ├── json.ts
│ │ │ │ ├── jsonb.ts
│ │ │ │ ├── line.ts
│ │ │ │ ├── macaddr.ts
│ │ │ │ ├── macaddr8.ts
│ │ │ │ ├── numeric.ts
│ │ │ │ ├── point.ts
│ │ │ │ ├── postgis_extension/
│ │ │ │ │ ├── geometry.ts
│ │ │ │ │ └── utils.ts
│ │ │ │ ├── real.ts
│ │ │ │ ├── serial.ts
│ │ │ │ ├── smallint.ts
│ │ │ │ ├── smallserial.ts
│ │ │ │ ├── text.ts
│ │ │ │ ├── time.ts
│ │ │ │ ├── timestamp.ts
│ │ │ │ ├── uuid.ts
│ │ │ │ ├── varchar.ts
│ │ │ │ └── vector_extension/
│ │ │ │ ├── bit.ts
│ │ │ │ ├── halfvec.ts
│ │ │ │ ├── sparsevec.ts
│ │ │ │ └── vector.ts
│ │ │ ├── db.ts
│ │ │ ├── dialect.ts
│ │ │ ├── expressions.ts
│ │ │ ├── foreign-keys.ts
│ │ │ ├── index.ts
│ │ │ ├── indexes.ts
│ │ │ ├── policies.ts
│ │ │ ├── primary-keys.ts
│ │ │ ├── query-builders/
│ │ │ │ ├── count.ts
│ │ │ │ ├── delete.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── insert.ts
│ │ │ │ ├── query-builder.ts
│ │ │ │ ├── query.ts
│ │ │ │ ├── raw.ts
│ │ │ │ ├── refresh-materialized-view.ts
│ │ │ │ ├── select.ts
│ │ │ │ ├── select.types.ts
│ │ │ │ └── update.ts
│ │ │ ├── roles.ts
│ │ │ ├── schema.ts
│ │ │ ├── sequence.ts
│ │ │ ├── session.ts
│ │ │ ├── subquery.ts
│ │ │ ├── table.ts
│ │ │ ├── unique-constraint.ts
│ │ │ ├── utils/
│ │ │ │ ├── array.ts
│ │ │ │ └── index.ts
│ │ │ ├── utils.ts
│ │ │ ├── view-base.ts
│ │ │ ├── view-common.ts
│ │ │ └── view.ts
│ │ ├── pg-proxy/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── pglite/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── planetscale-serverless/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── postgres-js/
│ │ │ ├── README.md
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── primary-key.ts
│ │ ├── prisma/
│ │ │ ├── mysql/
│ │ │ │ ├── driver.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── session.ts
│ │ │ ├── pg/
│ │ │ │ ├── driver.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── session.ts
│ │ │ ├── schema.prisma
│ │ │ └── sqlite/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ └── session.ts
│ │ ├── query-builders/
│ │ │ ├── query-builder.ts
│ │ │ └── select.types.ts
│ │ ├── query-promise.ts
│ │ ├── relations.ts
│ │ ├── runnable-query.ts
│ │ ├── selection-proxy.ts
│ │ ├── session.ts
│ │ ├── singlestore/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── singlestore-core/
│ │ │ ├── alias.ts
│ │ │ ├── columns/
│ │ │ │ ├── all.ts
│ │ │ │ ├── bigint.ts
│ │ │ │ ├── binary.ts
│ │ │ │ ├── boolean.ts
│ │ │ │ ├── char.ts
│ │ │ │ ├── common.ts
│ │ │ │ ├── custom.ts
│ │ │ │ ├── date.common.ts
│ │ │ │ ├── date.ts
│ │ │ │ ├── datetime.ts
│ │ │ │ ├── decimal.ts
│ │ │ │ ├── double.ts
│ │ │ │ ├── enum.ts
│ │ │ │ ├── float.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── int.ts
│ │ │ │ ├── json.ts
│ │ │ │ ├── mediumint.ts
│ │ │ │ ├── real.ts
│ │ │ │ ├── serial.ts
│ │ │ │ ├── smallint.ts
│ │ │ │ ├── text.ts
│ │ │ │ ├── time.ts
│ │ │ │ ├── timestamp.ts
│ │ │ │ ├── tinyint.ts
│ │ │ │ ├── varbinary.ts
│ │ │ │ ├── varchar.ts
│ │ │ │ ├── vector.ts
│ │ │ │ └── year.ts
│ │ │ ├── db.ts
│ │ │ ├── dialect.ts
│ │ │ ├── expressions.ts
│ │ │ ├── index.ts
│ │ │ ├── indexes.ts
│ │ │ ├── primary-keys.ts
│ │ │ ├── query-builders/
│ │ │ │ ├── count.ts
│ │ │ │ ├── delete.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── insert.ts
│ │ │ │ ├── query-builder.ts
│ │ │ │ ├── query.ts
│ │ │ │ ├── select.ts
│ │ │ │ ├── select.types.ts
│ │ │ │ └── update.ts
│ │ │ ├── schema.ts
│ │ │ ├── session.ts
│ │ │ ├── subquery.ts
│ │ │ ├── table.ts
│ │ │ ├── unique-constraint.ts
│ │ │ ├── utils.ts
│ │ │ ├── view-base.ts
│ │ │ ├── view-common.ts
│ │ │ └── view.ts
│ │ ├── singlestore-proxy/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── sql/
│ │ │ ├── expressions/
│ │ │ │ ├── conditions.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── select.ts
│ │ │ ├── functions/
│ │ │ │ ├── aggregate.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── vector.ts
│ │ │ ├── index.ts
│ │ │ └── sql.ts
│ │ ├── sql-js/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── sqlite-core/
│ │ │ ├── README.md
│ │ │ ├── alias.ts
│ │ │ ├── checks.ts
│ │ │ ├── columns/
│ │ │ │ ├── all.ts
│ │ │ │ ├── blob.ts
│ │ │ │ ├── common.ts
│ │ │ │ ├── custom.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── integer.ts
│ │ │ │ ├── numeric.ts
│ │ │ │ ├── real.ts
│ │ │ │ └── text.ts
│ │ │ ├── db.ts
│ │ │ ├── dialect.ts
│ │ │ ├── expressions.ts
│ │ │ ├── foreign-keys.ts
│ │ │ ├── index.ts
│ │ │ ├── indexes.ts
│ │ │ ├── primary-keys.ts
│ │ │ ├── query-builders/
│ │ │ │ ├── count.ts
│ │ │ │ ├── delete.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── insert.ts
│ │ │ │ ├── query-builder.ts
│ │ │ │ ├── query.ts
│ │ │ │ ├── raw.ts
│ │ │ │ ├── select.ts
│ │ │ │ ├── select.types.ts
│ │ │ │ └── update.ts
│ │ │ ├── session.ts
│ │ │ ├── subquery.ts
│ │ │ ├── table.ts
│ │ │ ├── unique-constraint.ts
│ │ │ ├── utils.ts
│ │ │ ├── view-base.ts
│ │ │ ├── view-common.ts
│ │ │ └── view.ts
│ │ ├── sqlite-proxy/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── subquery.ts
│ │ ├── supabase/
│ │ │ ├── index.ts
│ │ │ └── rls.ts
│ │ ├── table.ts
│ │ ├── table.utils.ts
│ │ ├── tidb-serverless/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── tracing-utils.ts
│ │ ├── tracing.ts
│ │ ├── utils.ts
│ │ ├── vercel-postgres/
│ │ │ ├── driver.ts
│ │ │ ├── index.ts
│ │ │ ├── migrator.ts
│ │ │ └── session.ts
│ │ ├── version.ts
│ │ ├── view-common.ts
│ │ └── xata-http/
│ │ ├── driver.ts
│ │ ├── index.ts
│ │ ├── migrator.ts
│ │ └── session.ts
│ ├── tests/
│ │ ├── casing/
│ │ │ ├── casing.test.ts
│ │ │ ├── mysql-to-camel.test.ts
│ │ │ ├── mysql-to-snake.test.ts
│ │ │ ├── pg-to-camel.test.ts
│ │ │ ├── pg-to-snake.test.ts
│ │ │ ├── sqlite-to-camel.test.ts
│ │ │ └── sqlite-to-snake.test.ts
│ │ ├── exports.test.ts
│ │ ├── is.test.ts
│ │ ├── makePgArray.test.ts
│ │ ├── parsePgArray.test.ts
│ │ ├── relation.test.ts
│ │ ├── tsconfig.json
│ │ └── type-hints.test.ts
│ ├── tsconfig.build.json
│ ├── tsconfig.dts.json
│ ├── tsconfig.json
│ ├── tsup.config.ts
│ ├── type-tests/
│ │ ├── common/
│ │ │ └── aliased-table.ts
│ │ ├── geldb/
│ │ │ ├── 1-to-1-fk.ts
│ │ │ ├── array.ts
│ │ │ ├── count.ts
│ │ │ ├── db-rel.ts
│ │ │ ├── db.ts
│ │ │ ├── delete.ts
│ │ │ ├── generated-columns.ts
│ │ │ ├── insert.ts
│ │ │ ├── no-strict-null-checks/
│ │ │ │ ├── test.ts
│ │ │ │ └── tsconfig.json
│ │ │ ├── other.ts
│ │ │ ├── select.ts
│ │ │ ├── set-operators.ts
│ │ │ ├── subquery.ts
│ │ │ ├── tables-rel.ts
│ │ │ ├── tables.ts
│ │ │ ├── update.ts
│ │ │ └── with.ts
│ │ ├── knex/
│ │ │ └── index.ts
│ │ ├── kysely/
│ │ │ └── index.ts
│ │ ├── mysql/
│ │ │ ├── 1-to-1-fk.ts
│ │ │ ├── 1000columns.ts
│ │ │ ├── count.ts
│ │ │ ├── db-rel.ts
│ │ │ ├── db.ts
│ │ │ ├── delete.ts
│ │ │ ├── generated-columns.ts
│ │ │ ├── insert.ts
│ │ │ ├── no-strict-null-checks/
│ │ │ │ ├── test.ts
│ │ │ │ └── tsconfig.json
│ │ │ ├── select.ts
│ │ │ ├── set-operators.ts
│ │ │ ├── subquery.ts
│ │ │ ├── tables-rel.ts
│ │ │ ├── tables.ts
│ │ │ ├── update.ts
│ │ │ └── with.ts
│ │ ├── pg/
│ │ │ ├── 1-to-1-fk.ts
│ │ │ ├── array.ts
│ │ │ ├── count.ts
│ │ │ ├── db-rel.ts
│ │ │ ├── db.ts
│ │ │ ├── delete.ts
│ │ │ ├── generated-columns.ts
│ │ │ ├── insert.ts
│ │ │ ├── no-strict-null-checks/
│ │ │ │ ├── test.ts
│ │ │ │ └── tsconfig.json
│ │ │ ├── other.ts
│ │ │ ├── select.ts
│ │ │ ├── set-operators.ts
│ │ │ ├── subquery.ts
│ │ │ ├── tables-rel.ts
│ │ │ ├── tables.ts
│ │ │ ├── update.ts
│ │ │ └── with.ts
│ │ ├── singlestore/
│ │ │ ├── 1000columns.ts
│ │ │ ├── count.ts
│ │ │ ├── db.ts
│ │ │ ├── delete.ts
│ │ │ ├── insert.ts
│ │ │ ├── no-strict-null-checks/
│ │ │ │ ├── test.ts
│ │ │ │ └── tsconfig.json
│ │ │ ├── select.ts
│ │ │ ├── set-operators.ts
│ │ │ ├── subquery.ts
│ │ │ ├── tables.ts
│ │ │ ├── update.ts
│ │ │ └── with.ts
│ │ ├── sqlite/
│ │ │ ├── .gitignore
│ │ │ ├── count.ts
│ │ │ ├── db.ts
│ │ │ ├── delete.ts
│ │ │ ├── generated-columns.ts
│ │ │ ├── insert.ts
│ │ │ ├── no-strict-null-checks/
│ │ │ │ ├── test.ts
│ │ │ │ └── tsconfig.json
│ │ │ ├── other.ts
│ │ │ ├── select.ts
│ │ │ ├── set-operators.ts
│ │ │ ├── subquery.ts
│ │ │ ├── tables.ts
│ │ │ ├── update.ts
│ │ │ └── with.ts
│ │ ├── tsconfig.json
│ │ ├── utils/
│ │ │ └── neon-auth-token.ts
│ │ └── utils.ts
│ └── vitest.config.ts
├── drizzle-seed/
│ ├── README.md
│ ├── package.json
│ ├── rollup.config.ts
│ ├── scripts/
│ │ └── build.ts
│ ├── src/
│ │ ├── datasets/
│ │ │ ├── adjectives.ts
│ │ │ ├── cityNames.ts
│ │ │ ├── companyNameSuffixes.ts
│ │ │ ├── countries.ts
│ │ │ ├── emailDomains.ts
│ │ │ ├── firstNames.ts
│ │ │ ├── jobsTitles.ts
│ │ │ ├── lastNames.ts
│ │ │ ├── loremIpsumSentences.ts
│ │ │ ├── phonesInfo.ts
│ │ │ ├── states.ts
│ │ │ └── streetSuffix.ts
│ │ ├── index.ts
│ │ ├── services/
│ │ │ ├── GeneratorFuncs.ts
│ │ │ ├── Generators.ts
│ │ │ ├── SeedService.ts
│ │ │ ├── apiVersion.ts
│ │ │ ├── utils.ts
│ │ │ └── versioning/
│ │ │ └── v2.ts
│ │ └── types/
│ │ ├── drizzleStudio.ts
│ │ ├── seedService.ts
│ │ └── tables.ts
│ ├── tests/
│ │ ├── benchmarks/
│ │ │ └── generatorsBenchmark.ts
│ │ ├── mysql/
│ │ │ ├── allDataTypesTest/
│ │ │ │ ├── mysqlSchema.ts
│ │ │ │ └── mysql_all_data_types.test.ts
│ │ │ ├── cyclicTables/
│ │ │ │ ├── cyclicTables.test.ts
│ │ │ │ └── mysqlSchema.ts
│ │ │ ├── generatorsTest/
│ │ │ │ ├── generators.test.ts
│ │ │ │ └── mysqlSchema.ts
│ │ │ ├── mysql.test.ts
│ │ │ ├── mysqlSchema.ts
│ │ │ └── softRelationsTest/
│ │ │ ├── mysqlSchema.ts
│ │ │ └── softRelations.test.ts
│ │ ├── northwind/
│ │ │ ├── mysqlSchema.ts
│ │ │ ├── mysqlTest.ts
│ │ │ ├── pgSchema.ts
│ │ │ ├── pgTest.ts
│ │ │ ├── sqliteSchema.ts
│ │ │ └── sqliteTest.ts
│ │ ├── pg/
│ │ │ ├── allDataTypesTest/
│ │ │ │ ├── pgSchema.ts
│ │ │ │ └── pg_all_data_types.test.ts
│ │ │ ├── cyclicTables/
│ │ │ │ ├── cyclicTables.test.ts
│ │ │ │ └── pgSchema.ts
│ │ │ ├── generatorsTest/
│ │ │ │ ├── generators.test.ts
│ │ │ │ └── pgSchema.ts
│ │ │ ├── pg.test.ts
│ │ │ ├── pgSchema.ts
│ │ │ └── softRelationsTest/
│ │ │ ├── pgSchema.ts
│ │ │ └── softRelations.test.ts
│ │ └── sqlite/
│ │ ├── allDataTypesTest/
│ │ │ ├── sqliteSchema.ts
│ │ │ └── sqlite_all_data_types.test.ts
│ │ ├── cyclicTables/
│ │ │ ├── cyclicTables.test.ts
│ │ │ └── sqliteSchema.ts
│ │ ├── softRelationsTest/
│ │ │ ├── softRelations.test.ts
│ │ │ └── sqliteSchema.ts
│ │ ├── sqlite.test.ts
│ │ └── sqliteSchema.ts
│ ├── tsconfig.build.json
│ ├── tsconfig.json
│ ├── type-tests/
│ │ ├── mysql.ts
│ │ ├── pg.ts
│ │ ├── sqlite.ts
│ │ └── tsconfig.json
│ └── vitest.config.ts
├── drizzle-typebox/
│ ├── README.md
│ ├── package.json
│ ├── rollup.config.ts
│ ├── scripts/
│ │ ├── build.ts
│ │ └── fix-imports.ts
│ ├── src/
│ │ ├── column.ts
│ │ ├── column.types.ts
│ │ ├── constants.ts
│ │ ├── index.ts
│ │ ├── schema.ts
│ │ ├── schema.types.internal.ts
│ │ ├── schema.types.ts
│ │ └── utils.ts
│ ├── tests/
│ │ ├── mysql.test.ts
│ │ ├── pg.test.ts
│ │ ├── singlestore.test.ts
│ │ ├── sqlite.test.ts
│ │ ├── tsconfig.json
│ │ └── utils.ts
│ ├── tsconfig.build.json
│ ├── tsconfig.json
│ └── vitest.config.ts
├── drizzle-valibot/
│ ├── README.md
│ ├── package.json
│ ├── rollup.config.ts
│ ├── scripts/
│ │ ├── build.ts
│ │ └── fix-imports.ts
│ ├── src/
│ │ ├── column.ts
│ │ ├── column.types.ts
│ │ ├── constants.ts
│ │ ├── index.ts
│ │ ├── schema.ts
│ │ ├── schema.types.internal.ts
│ │ ├── schema.types.ts
│ │ └── utils.ts
│ ├── tests/
│ │ ├── mysql.test.ts
│ │ ├── pg.test.ts
│ │ ├── singlestore.test.ts
│ │ ├── sqlite.test.ts
│ │ ├── tsconfig.json
│ │ └── utils.ts
│ ├── tsconfig.build.json
│ ├── tsconfig.json
│ └── vitest.config.ts
├── drizzle-zod/
│ ├── README.md
│ ├── package.json
│ ├── rollup.config.ts
│ ├── scripts/
│ │ ├── build.ts
│ │ └── fix-imports.ts
│ ├── src/
│ │ ├── column.ts
│ │ ├── column.types.ts
│ │ ├── constants.ts
│ │ ├── index.ts
│ │ ├── schema.ts
│ │ ├── schema.types.internal.ts
│ │ ├── schema.types.ts
│ │ └── utils.ts
│ ├── tests/
│ │ ├── mysql.test.ts
│ │ ├── pg.test.ts
│ │ ├── singlestore.test.ts
│ │ ├── sqlite.test.ts
│ │ ├── tsconfig.json
│ │ └── utils.ts
│ ├── tsconfig.build.json
│ ├── tsconfig.json
│ └── vitest.config.ts
├── eslint/
│ └── eslint-plugin-drizzle-internal/
│ └── index.js
├── eslint-plugin-drizzle/
│ ├── .gitignore
│ ├── package.json
│ ├── readme.md
│ ├── src/
│ │ ├── configs/
│ │ │ ├── all.ts
│ │ │ └── recommended.ts
│ │ ├── enforce-delete-with-where.ts
│ │ ├── enforce-update-with-where.ts
│ │ ├── index.ts
│ │ └── utils/
│ │ ├── ast.ts
│ │ └── options.ts
│ ├── tests/
│ │ ├── delete.test.ts
│ │ └── update.test.ts
│ ├── tsconfig.json
│ └── vitest.config.ts
├── integration-tests/
│ ├── .gitignore
│ ├── .xata/
│ │ ├── migrations/
│ │ │ └── .ledger
│ │ └── version/
│ │ └── compatibility.json
│ ├── .xatarc
│ ├── docker-neon.yml
│ ├── drizzle2/
│ │ ├── mysql/
│ │ │ ├── 0000_nostalgic_carnage.sql
│ │ │ └── meta/
│ │ │ ├── 0000_snapshot.json
│ │ │ └── _journal.json
│ │ ├── mysql-proxy/
│ │ │ ├── first/
│ │ │ │ ├── 0000_nostalgic_carnage.sql
│ │ │ │ └── meta/
│ │ │ │ ├── 0000_snapshot.json
│ │ │ │ └── _journal.json
│ │ │ └── second/
│ │ │ ├── 0000_nostalgic_carnage.sql
│ │ │ ├── 0001_test.sql
│ │ │ └── meta/
│ │ │ ├── 0000_snapshot.json
│ │ │ ├── 0001_snapshot.json
│ │ │ └── _journal.json
│ │ ├── pg/
│ │ │ ├── 0000_puzzling_flatman.sql
│ │ │ ├── 0001_test.sql
│ │ │ └── meta/
│ │ │ ├── 0000_snapshot.json
│ │ │ └── _journal.json
│ │ ├── pg-proxy/
│ │ │ ├── first/
│ │ │ │ ├── 0000_puzzling_flatman.sql
│ │ │ │ └── meta/
│ │ │ │ ├── 0000_snapshot.json
│ │ │ │ └── _journal.json
│ │ │ └── second/
│ │ │ ├── 0000_puzzling_flatman.sql
│ │ │ ├── 0001_test.sql
│ │ │ └── meta/
│ │ │ ├── 0000_snapshot.json
│ │ │ ├── 0001_snapshot.json
│ │ │ └── _journal.json
│ │ ├── planetscale/
│ │ │ ├── 0000_nostalgic_carnage.sql
│ │ │ └── meta/
│ │ │ ├── 0000_snapshot.json
│ │ │ └── _journal.json
│ │ ├── singlestore/
│ │ │ ├── 0000_nostalgic_carnage.sql
│ │ │ └── meta/
│ │ │ ├── 0000_snapshot.json
│ │ │ └── _journal.json
│ │ └── sqlite/
│ │ ├── 0000_fancy_bug.sql
│ │ └── meta/
│ │ ├── 0000_snapshot.json
│ │ └── _journal.json
│ ├── js-tests/
│ │ └── driver-init/
│ │ ├── commonjs/
│ │ │ ├── better-sqlite3.test.cjs
│ │ │ ├── libsql.test.cjs
│ │ │ ├── mysql2.test.cjs
│ │ │ ├── neon-http.test.cjs
│ │ │ ├── neon-ws.test.cjs
│ │ │ ├── node-pg.test.cjs
│ │ │ ├── pglite.test.cjs
│ │ │ ├── planetscale.test.cjs
│ │ │ ├── postgres-js.test.cjs
│ │ │ ├── schema.cjs
│ │ │ ├── tidb.test.cjs
│ │ │ └── vercel.test.cjs
│ │ └── module/
│ │ ├── better-sqlite3.test.mjs
│ │ ├── libsql.test.mjs
│ │ ├── mysql2.test.mjs
│ │ ├── neon-http.test.mjs
│ │ ├── neon-ws.test.mjs
│ │ ├── node-pg.test.mjs
│ │ ├── pglite.test.mjs
│ │ ├── planetscale.test.mjs
│ │ ├── postgres-js.test.mjs
│ │ ├── schema.mjs
│ │ ├── tidb.test.mjs
│ │ └── vercel.test.mjs
│ ├── package.json
│ ├── sst-env.d.ts
│ ├── sst.config.ts
│ ├── tests/
│ │ ├── awsdatapi.alltypes.test.ts
│ │ ├── bun/
│ │ │ ├── bun-sql.test.ts
│ │ │ ├── sqlite-nw.test.ts
│ │ │ └── sqlite.test.ts
│ │ ├── common.ts
│ │ ├── extensions/
│ │ │ ├── postgis/
│ │ │ │ ├── pg.test.ts
│ │ │ │ └── postgres.test.ts
│ │ │ └── vectors/
│ │ │ ├── pg.test.ts
│ │ │ └── postgres.test.ts
│ │ ├── gel/
│ │ │ ├── cache.ts
│ │ │ ├── createInstance.ts
│ │ │ ├── gel-custom.test.ts
│ │ │ ├── gel-ext.test.ts
│ │ │ └── gel.test.ts
│ │ ├── imports/
│ │ │ └── index.test.ts
│ │ ├── mysql/
│ │ │ ├── mysql-common-cache.ts
│ │ │ ├── mysql-common.ts
│ │ │ ├── mysql-custom.test.ts
│ │ │ ├── mysql-planetscale.test.ts
│ │ │ ├── mysql-prefixed.test.ts
│ │ │ ├── mysql-proxy.test.ts
│ │ │ ├── mysql.test.ts
│ │ │ └── tidb-serverless.test.ts
│ │ ├── mysql-returning.test.ts
│ │ ├── pg/
│ │ │ ├── awsdatapi.test.ts
│ │ │ ├── neon-http-batch.test.ts
│ │ │ ├── neon-http-batch.ts
│ │ │ ├── neon-http.test.ts
│ │ │ ├── neon-serverless.test.ts
│ │ │ ├── node-postgres.test.ts
│ │ │ ├── pg-common-cache.ts
│ │ │ ├── pg-common.ts
│ │ │ ├── pg-custom.test.ts
│ │ │ ├── pg-proxy.test.ts
│ │ │ ├── pglite.test.ts
│ │ │ ├── postgres-js.test.ts
│ │ │ ├── rls/
│ │ │ │ └── rls.definition.test.ts
│ │ │ ├── vercel-pg.test.ts
│ │ │ └── xata-http.test.ts
│ │ ├── relational/
│ │ │ ├── bettersqlite.test.ts
│ │ │ ├── db.ts
│ │ │ ├── issues-schemas/
│ │ │ │ ├── duplicates/
│ │ │ │ │ ├── mysql/
│ │ │ │ │ │ ├── mysql.duplicates.test.ts
│ │ │ │ │ │ └── mysql.duplicates.ts
│ │ │ │ │ └── pg/
│ │ │ │ │ ├── pg.duplicates.test.ts
│ │ │ │ │ └── pg.duplicates.ts
│ │ │ │ └── wrong-mapping/
│ │ │ │ ├── pg.schema.ts
│ │ │ │ └── pg.test.ts
│ │ │ ├── mysql.planetscale.test.ts
│ │ │ ├── mysql.schema.ts
│ │ │ ├── mysql.test.ts
│ │ │ ├── pg.postgresjs.test.ts
│ │ │ ├── pg.schema.ts
│ │ │ ├── pg.test.ts
│ │ │ ├── singlestore.schema.ts
│ │ │ ├── singlestore.test.ts
│ │ │ ├── sqlite.schema.ts
│ │ │ ├── tables.ts
│ │ │ ├── turso.test.ts
│ │ │ └── vercel.test.ts
│ │ ├── replicas/
│ │ │ ├── mysql.test.ts
│ │ │ ├── postgres.test.ts
│ │ │ ├── singlestore.test.ts
│ │ │ └── sqlite.test.ts
│ │ ├── seeder/
│ │ │ ├── mysql.test.ts
│ │ │ ├── mysqlSchema.ts
│ │ │ ├── pg.test.ts
│ │ │ ├── pgSchema.ts
│ │ │ ├── sqlite.test.ts
│ │ │ └── sqliteSchema.ts
│ │ ├── singlestore/
│ │ │ ├── singlestore-cache.ts
│ │ │ ├── singlestore-common.ts
│ │ │ ├── singlestore-custom.test.ts
│ │ │ ├── singlestore-prefixed.test.ts
│ │ │ ├── singlestore-proxy.test.ts
│ │ │ └── singlestore.test.ts
│ │ ├── sqlite/
│ │ │ ├── better-sqlite.test.ts
│ │ │ ├── d1-batch.test.ts
│ │ │ ├── d1.test.ts
│ │ │ ├── durable-objects/
│ │ │ │ ├── drizzle/
│ │ │ │ │ ├── 0000_cuddly_black_bolt.sql
│ │ │ │ │ ├── meta/
│ │ │ │ │ │ ├── 0000_snapshot.json
│ │ │ │ │ │ └── _journal.json
│ │ │ │ │ └── migrations.js
│ │ │ │ ├── index.ts
│ │ │ │ ├── worker-configuration.d.ts
│ │ │ │ └── wrangler.toml
│ │ │ ├── libsql-batch.test.ts
│ │ │ ├── libsql-http.test.ts
│ │ │ ├── libsql-node.test.ts
│ │ │ ├── libsql-sqlite3.test.ts
│ │ │ ├── libsql-ws.test.ts
│ │ │ ├── libsql.test.ts
│ │ │ ├── sql-js.test.ts
│ │ │ ├── sqlite-common-cache.ts
│ │ │ ├── sqlite-common.ts
│ │ │ ├── sqlite-proxy-batch.test.ts
│ │ │ └── sqlite-proxy.test.ts
│ │ ├── utils/
│ │ │ └── is-config.test.ts
│ │ ├── utils.ts
│ │ ├── version.test.ts
│ │ └── xata/
│ │ └── xata.ts
│ ├── tsconfig.json
│ ├── type-tests/
│ │ └── join-nodenext/
│ │ ├── gel.ts
│ │ ├── mysql.ts
│ │ ├── package.json
│ │ ├── pg.ts
│ │ ├── singlestore.ts
│ │ ├── sqlite.ts
│ │ └── tsconfig.json
│ ├── vitest-ci.config.ts
│ └── vitest.config.ts
├── package.json
├── patches/
│ └── typescript@5.6.3.patch
├── pnpm-workspace.yaml
├── tsconfig.json
└── turbo.json
================================================
FILE CONTENTS
================================================
================================================
FILE: .eslintignore
================================================
node_modules
dist
dist-dts
examples
**/*.js
**/*.mjs
**/*.cjs
**/playground
integration-tests/tests/prisma/*/client
integration-tests/tests/prisma/*/drizzle
drizzle-kit/*
================================================
FILE: .eslintrc.yaml
================================================
root: true
extends:
- 'eslint:recommended'
- 'plugin:@typescript-eslint/recommended'
- 'plugin:unicorn/recommended'
parser: '@typescript-eslint/parser'
parserOptions:
project: './tsconfig.json'
plugins:
- import
- unused-imports
- no-instanceof
- drizzle-internal
overrides:
- files:
- '**/tests/**/*.ts'
- '**/type-tests/**/*.ts'
rules:
import/extensions: 'off'
no-instanceof: 'off'
- files: 'eslint-plugin-drizzle/**/*'
rules:
import/extensions: 'off'
rules:
'@typescript-eslint/consistent-type-imports':
- error
- disallowTypeAnnotations: false
fixStyle: separate-type-imports
'@typescript-eslint/no-import-type-side-effects': 'error'
import/no-cycle: error
import/no-self-import: error
import/no-empty-named-blocks: error
unused-imports/no-unused-imports: error
import/no-useless-path-segments: error
import/newline-after-import: error
import/no-duplicates: error
import/extensions:
- error
- always
- ignorePackages: true
'@typescript-eslint/no-explicit-any': 'off'
'@typescript-eslint/no-non-null-assertion': 'off'
'@typescript-eslint/no-namespace': 'off'
'@typescript-eslint/no-unused-vars':
- error
- argsIgnorePattern: '^_'
varsIgnorePattern: '^_'
'@typescript-eslint/ban-types':
- error
- extendDefaults: true
types:
'{}' : false
'@typescript-eslint/no-this-alias': 'off'
'@typescript-eslint/no-var-requires': 'off'
'unicorn/prefer-node-protocol': 'off'
'unicorn/prefer-top-level-await': 'off'
'unicorn/prevent-abbreviations': 'off'
'unicorn/prefer-switch': 'off'
'unicorn/catch-error-name': 'off'
'unicorn/no-null': 'off'
'unicorn/numeric-separators-style': 'off'
'unicorn/explicit-length-check': 'off'
'unicorn/filename-case': 'off'
'unicorn/prefer-module': 'off'
'unicorn/no-array-reduce': 'off'
'unicorn/no-nested-ternary': 'off'
'unicorn/no-useless-undefined':
- error
- checkArguments: false
'unicorn/no-this-assignment': 'off'
'unicorn/empty-brace-spaces': 'off'
'unicorn/no-thenable': 'off'
'unicorn/consistent-function-scoping': 'off'
'unicorn/prefer-type-error': 'off'
'unicorn/relative-url-style': 'off'
'eqeqeq': 'error'
'no-instanceof/no-instanceof': 'error'
'drizzle-internal/require-entity-kind': 'error'
'unicorn/prefer-string-replace-all': 'off'
'unicorn/no-process-exit': 'off'
'@typescript-eslint/ban-ts-comment': 'off'
'@typescript-eslint/no-empty-interface': 'off'
'@typescript-eslint/no-unsafe-declaration-merging': 'off'
'no-inner-declarations': 'off'
================================================
FILE: .github/FUNDING.yml
================================================
# These are supported funding model platforms
github: drizzle-team
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
================================================
FILE: .github/ISSUE_TEMPLATE/bug-template.yaml
================================================
name: "Bug Report"
description: Report an issue or possible bug
title: "[BUG]:"
labels: ["bug"]
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to file a bug report! Please provide as much information as possible.
- type: checkboxes
id: verified
attributes:
label: Report hasn't been filed before.
options:
- label: I have verified that the bug I'm about to report hasn't been filed before.
required: true
- type: input
attributes:
label: What version of `drizzle-orm` are you using?
description: You can check the version by opening the `package.json` file in your project.
placeholder: 0.0.0
validations:
required: true
- type: input
attributes:
label: What version of `drizzle-kit` are you using?
description: You can check the version by opening the `package.json` file in your project.
placeholder: 0.0.0
validations:
required: true
- type: input
attributes:
label: Other packages
description: If this bug is related to one of the other first-party packages we maintain, please list them here alongside their version.
placeholder: drizzle-zod@0.0.0, drizzle-valibot@0.0.0
validations:
required: false
- type: textarea
attributes:
label: Describe the Bug
description: |
To fill this field, please answer the following:
- What is the undesired behavior?
- What are the steps to reproduce it?
- What is the desired result?
If the issue is more specific, consider answering the following questions if you think they may be relevant:
- What database engine are you using? Are you using a specific cloud provider? Which one?
- Do you think this bug pertains to a specific database driver? Which one?
- Are you working in a monorepo?
- If this is a bug related to types: What TypeScript version are you using? What's the content of your tsconfig.json file?
- If you're using a runtime that isn't Node.js: Which one? What version? Have you verified that this isn't an issue with the runtime itself?
validations:
required: true
================================================
FILE: .github/ISSUE_TEMPLATE/config.yml
================================================
blank_issues_enabled: true
contact_links:
- name: Ask a question
url: https://discord.gg/JGrkEU4Scj
about: Ask questions and discuss with other community members in Discord
================================================
FILE: .github/ISSUE_TEMPLATE/docs-template.yaml
================================================
name: "Documentation Enhancement"
description: Suggest documentation improvements
title: "[DOCS]:"
labels: ["docs"]
body:
- type: checkboxes
id: verified
attributes:
label: Enhancement hasn't been filed before.
options:
- label: I have verified this enhancement I'm about to request hasn't been suggested before.
required: true
- type: textarea
attributes:
label: Describe the enhancement you want to request
description: What do you want to change or add to the documentation?
validations:
required: true
================================================
FILE: .github/ISSUE_TEMPLATE/feature-template.yaml
================================================
name: "Feature Request"
description: Suggest new feature
title: "[FEATURE]:"
labels: ["enhancement"]
body:
- type: checkboxes
id: verified
attributes:
label: Feature hasn't been suggested before.
options:
- label: I have verified this feature I'm about to request hasn't been suggested before.
required: true
- type: textarea
attributes:
label: Describe the enhancement you want to request
description: What do you want to change or add? What are the benefits of implementing this?
validations:
required: true
================================================
FILE: .github/workflows/codeql.yml
================================================
name: "CodeQL"
on:
push:
branches: [ 'main', 'beta' ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ 'main' ]
schedule:
- cron: '44 16 * * 0'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Use only 'java' to analyze code written in Java, Kotlin or both
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# Modify them (or add more) to build your code; if your project requires it, refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"
================================================
FILE: .github/workflows/release-feature-branch.yaml
================================================
name: Release (feature branch)
on:
workflow_call:
secrets:
PLANETSCALE_CONNECTION_STRING:
required: true
NEON_CONNECTION_STRING:
required: true
# NEON_HTTP_CONNECTION_STRING:
# required: true
TIDB_CONNECTION_STRING:
required: true
XATA_API_KEY:
required: true
XATA_BRANCH:
required: true
LIBSQL_REMOTE_URL:
required: true
LIBSQL_REMOTE_TOKEN:
required: true
jobs:
test:
# run only on pushes, or on pull requests coming from forks
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
strategy:
matrix:
shard:
- gel
- planetscale
- singlestore-core
- singlestore-proxy
- singlestore-prefixed
- singlestore-custom
- neon-http
- neon-serverless
- drizzle-orm
- drizzle-kit
- drizzle-zod
- drizzle-seed
- drizzle-typebox
- drizzle-valibot
- drizzle-arktype
- other
runs-on: ubuntu-22.04
services:
postgres-postgis:
image: postgis/postgis:16-3.4
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 54322:5432
postgres-vector:
image: pgvector/pgvector:pg16
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 54321:5432
postgres:
image: postgres:14
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 55433:5432
mysql:
image: mysql:8
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: drizzle
options: >-
--health-cmd "mysqladmin ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 33306:3306
singlestore:
image: ghcr.io/singlestore-labs/singlestoredb-dev:latest
env:
ROOT_PASSWORD: singlestore
ports:
- 33307:3306
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: '20.19'
registry-url: 'https://registry.npmjs.org'
- uses: pnpm/action-setup@v3
name: Install pnpm
id: pnpm-install
with:
version: latest
run_install: false
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
name: Setup pnpm cache
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install
- name: Build Prisma client
working-directory: drizzle-orm
run: pnpm prisma generate --schema src/prisma/schema.prisma
- name: Build
run: pnpm build
- name: Run tests
env:
PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:55433/drizzle
PG_VECTOR_CONNECTION_STRING: postgres://postgres:postgres@localhost:54321/drizzle
PG_POSTGIS_CONNECTION_STRING: postgres://postgres:postgres@localhost:54322/drizzle
MYSQL_CONNECTION_STRING: mysql://root:root@localhost:33306/drizzle
PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }}
NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
# NEON_HTTP_CONNECTION_STRING: postgres://postgres:postgres@db.localtest.me:5432/postgres
NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
NEON_SERVERLESS_CONNECTION_STRING: postgres://postgres:postgres@localhost:5445/postgres
TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }}
XATA_API_KEY: ${{ secrets.XATA_API_KEY }}
XATA_BRANCH: ${{ secrets.XATA_BRANCH }}
LIBSQL_URL: file:local.db
LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }}
LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }}
SINGLESTORE_CONNECTION_STRING: singlestore://root:singlestore@localhost:33307/
working-directory: integration-tests
run: |
if [[ "${{ github.event_name }}" != "push" && "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]]; then
export SKIP_EXTERNAL_DB_TESTS=1
fi
case ${{ matrix.shard }} in
gel)
if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then
pnpm vitest run tests/gel
fi
;;
planetscale)
if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then
pnpm vitest run \
tests/mysql/mysql-planetscale.test.ts \
tests/relational/mysql.planetscale-v1.test.ts \
tests/relational/mysql.planetscale.test.ts
fi
;;
singlestore-core)
pnpm vitest run tests/singlestore/singlestore.test.ts
;;
singlestore-proxy)
pnpm vitest run tests/singlestore/singlestore-proxy.test.ts
;;
singlestore-prefixed)
pnpm vitest run tests/singlestore/singlestore-prefixed.test.ts
;;
singlestore-custom)
pnpm vitest run tests/singlestore/singlestore-custom.test.ts
;;
neon-http)
if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then
pnpm vitest run tests/pg/neon-http.test.ts tests/pg/neon-http-batch.test.ts
fi
;;
neon-serverless)
docker compose -f docker-neon.yml up -d
pnpm vitest run --config=./vitest-ci.config.ts tests/pg/neon-serverless.test.ts
docker compose -f docker-neon.yml down
;;
drizzle-orm|drizzle-kit|drizzle-zod|drizzle-seed|drizzle-typebox|drizzle-valibot|drizzle-arktype)
(cd .. && pnpm test --filter ${{ matrix.shard }})
;;
other)
pnpm vitest run \
--exclude tests/gel \
--exclude tests/mysql/mysql-planetscale.test.ts \
--exclude tests/relational/mysql.planetscale-v1.test.ts \
--exclude tests/relational/mysql.planetscale.test.ts \
--exclude tests/singlestore/singlestore.test.ts \
--exclude tests/singlestore/singlestore-proxy.test.ts \
--exclude tests/singlestore/singlestore-prefixed.test.ts \
--exclude tests/singlestore/singlestore-custom.test.ts \
--exclude tests/pg/neon-http.test.ts \
--exclude tests/pg/neon-http-batch.test.ts \
--exclude tests/pg/neon-serverless.test.ts
;;
esac
attw:
# only run on all pushes or pull requests from forks
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
strategy:
matrix:
package:
- drizzle-orm
- drizzle-kit
- drizzle-zod
- drizzle-seed
- drizzle-typebox
- drizzle-valibot
- drizzle-arktype
- eslint-plugin-drizzle
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: '22'
registry-url: 'https://registry.npmjs.org'
- uses: pnpm/action-setup@v3
name: Install pnpm
id: pnpm-install
with:
version: latest
run_install: false
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
name: Setup pnpm cache
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install
- name: Install Bun
uses: oven-sh/setup-bun@v2
- name: Check preconditions
id: checks
shell: bash
working-directory: ${{ matrix.package }}
run: |
old_version="$(jq -r .version package.json)"
version="$old_version-$(git rev-parse --short HEAD)"
npm version $version
tag="${{ github.ref_name }}"
is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')"
if [[ "$is_version_published" == "true" ]]; then
echo "\`${{ matrix.package }}@$version\` already published, adding tag \`$tag\`" >> $GITHUB_STEP_SUMMARY
npm dist-tag add ${{ matrix.package }}@$version $tag
else
{
echo "version=$version"
echo "tag=$tag"
echo "has_new_release=true"
} >> $GITHUB_OUTPUT
fi
- name: Build Prisma client
if: steps.checks.outputs.has_new_release == 'true'
working-directory: drizzle-orm
run: pnpm prisma generate --schema src/prisma/schema.prisma
- name: Build
if: steps.checks.outputs.has_new_release == 'true'
run: pnpm build
- name: Pack
if: steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
run: npm run pack
- name: Run @arethetypeswrong/cli
if: steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
run: bunx attw package.tgz
release:
# only run on all pushes or pull requests from forks
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
needs:
- test
- attw
strategy:
matrix:
package:
- drizzle-orm
- drizzle-kit
- drizzle-zod
- drizzle-seed
- drizzle-typebox
- drizzle-valibot
- drizzle-arktype
- eslint-plugin-drizzle
runs-on: ubuntu-22.04
permissions:
contents: read
id-token: write # for OIDC
# force empty so npm can use OIDC
env:
NODE_AUTH_TOKEN: ""
NPM_TOKEN: ""
steps:
- uses: actions/checkout@v5
- uses: pnpm/action-setup@v4
with: { run_install: false }
- uses: actions/setup-node@v6
with: { node-version: '24', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml }
- run: pnpm install --frozen-lockfile --prefer-offline
# >= 11.5.1 for trusted publishing
- name: Update NPM
run: npm install -g npm@latest
# nuke, so npm can use OIDC
- name: Remove temp npmrc
run: rm -f "$NPM_CONFIG_USERCONFIG"
- name: Check preconditions
id: checks
shell: bash
working-directory: ${{ matrix.package }}
run: |
old_version="$(jq -r .version package.json)"
version="$old_version-$(git rev-parse --short HEAD)"
npm version $version
tag="${{ github.ref_name }}"
is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')"
if [[ "$is_version_published" == "true" ]]; then
echo "\`${{ matrix.package }}@$version\` already published, adding tag \`$tag\`" >> $GITHUB_STEP_SUMMARY
else
{
echo "version=$version"
echo "tag=$tag"
echo "has_new_release=true"
} >> $GITHUB_OUTPUT
fi
- name: Build Prisma client
if: steps.checks.outputs.has_new_release == 'true'
working-directory: drizzle-orm
run: pnpm prisma generate --schema src/prisma/schema.prisma
- name: Build
if: steps.checks.outputs.has_new_release == 'true'
run: pnpm build
- name: Pack
if: steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
shell: bash
run: npm run pack
- name: Publish
if: github.event_name == 'push' && steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
shell: bash
run: |
tag="${{ steps.checks.outputs.tag }}"
version="${{ steps.checks.outputs.version }}"
echo "Publishing ${{ matrix.package }}@$tag using version $version"
npm run publish -- --tag $tag
echo "npm: \`${{ matrix.package }}@$tag | ${{ matrix.package }}@$version\`" >> $GITHUB_STEP_SUMMARY
# Post release message to Discord
# curl -X POST -H "Content-Type: application/json" -d "{\"embeds\": [{\"title\": \"New \`${{ matrix.package }}\` release! 🎉\", \"url\": \"https://www.npmjs.com/package/${{ matrix.package }}/v/$version\", \"color\": \"12907856\", \"fields\": [{\"name\": \"Version\", \"value\": \"\`$version\`\"}, {\"name\": \"Tag\", \"value\": \"\`$tag\`\"}]}]}" ${{ secrets.DISCORD_DEV_RELEASE_WEBHOOK_URL }}
================================================
FILE: .github/workflows/release-latest.yaml
================================================
name: Release (latest)
on:
workflow_call:
secrets:
PLANETSCALE_CONNECTION_STRING:
required: true
NEON_CONNECTION_STRING:
required: true
# NEON_HTTP_CONNECTION_STRING:
# required: true
TIDB_CONNECTION_STRING:
required: true
XATA_API_KEY:
required: true
XATA_BRANCH:
required: true
LIBSQL_REMOTE_URL:
required: true
LIBSQL_REMOTE_TOKEN:
required: true
jobs:
test:
strategy:
matrix:
shard:
- gel
- planetscale
- singlestore-core
- singlestore-proxy
- singlestore-prefixed
- singlestore-custom
- neon-http
- neon-serverless
- drizzle-orm
- drizzle-kit
- drizzle-zod
- drizzle-seed
- drizzle-typebox
- drizzle-valibot
- drizzle-arktype
- other
runs-on: ubuntu-22.04
services:
postgres-postgis:
image: postgis/postgis:16-3.4
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 54322:5432
postgres-vector:
image: pgvector/pgvector:pg16
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 54321:5432
postgres:
image: postgres:14
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 55433:5432
mysql:
image: mysql:8
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: drizzle
options: >-
--health-cmd "mysqladmin ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 33306:3306
singlestore:
image: ghcr.io/singlestore-labs/singlestoredb-dev:latest
env:
ROOT_PASSWORD: singlestore
ports:
- 33307:3306
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: '20.19'
registry-url: 'https://registry.npmjs.org'
- uses: pnpm/action-setup@v3
name: Install pnpm
id: pnpm-install
with:
version: latest
run_install: false
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
name: Setup pnpm cache
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install
- name: Build Prisma client
working-directory: drizzle-orm
run: pnpm prisma generate --schema src/prisma/schema.prisma
- name: Build
run: pnpm build
- name: Run tests
env:
PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:55433/drizzle
PG_VECTOR_CONNECTION_STRING: postgres://postgres:postgres@localhost:54321/drizzle
PG_POSTGIS_CONNECTION_STRING: postgres://postgres:postgres@localhost:54322/drizzle
MYSQL_CONNECTION_STRING: mysql://root:root@localhost:33306/drizzle
PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }}
NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
# NEON_HTTP_CONNECTION_STRING: postgres://postgres:postgres@db.localtest.me:5432/postgres
NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
NEON_SERVERLESS_CONNECTION_STRING: postgres://postgres:postgres@localhost:5445/postgres
TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }}
XATA_API_KEY: ${{ secrets.XATA_API_KEY }}
XATA_BRANCH: ${{ secrets.XATA_BRANCH }}
LIBSQL_URL: file:local.db
LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }}
LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }}
SINGLESTORE_CONNECTION_STRING: singlestore://root:singlestore@localhost:33307/
working-directory: integration-tests
run: |
case ${{ matrix.shard }} in
gel)
pnpm vitest run tests/gel
;;
planetscale)
pnpm vitest run \
tests/mysql/mysql-planetscale.test.ts \
tests/relational/mysql.planetscale-v1.test.ts \
tests/relational/mysql.planetscale.test.ts
;;
singlestore-core)
pnpm vitest run tests/singlestore/singlestore.test.ts
;;
singlestore-proxy)
pnpm vitest run tests/singlestore/singlestore-proxy.test.ts
;;
singlestore-prefixed)
pnpm vitest run tests/singlestore/singlestore-prefixed.test.ts
;;
singlestore-custom)
pnpm vitest run tests/singlestore/singlestore-custom.test.ts
;;
neon-http)
pnpm vitest run tests/pg/neon-http.test.ts tests/pg/neon-http-batch.test.ts
;;
neon-serverless)
docker compose -f docker-neon.yml up -d
pnpm vitest run tests/pg/neon-serverless.test.ts
docker compose -f docker-neon.yml down
;;
drizzle-orm|drizzle-kit|drizzle-zod|drizzle-seed|drizzle-typebox|drizzle-valibot|drizzle-arktype)
(cd .. && pnpm test --filter ${{ matrix.shard }})
;;
other)
pnpm vitest run \
--exclude tests/gel \
--exclude tests/mysql/mysql-planetscale.test.ts \
--exclude tests/relational/mysql.planetscale-v1.test.ts \
--exclude tests/relational/mysql.planetscale.test.ts \
--exclude tests/singlestore/singlestore.test.ts \
--exclude tests/singlestore/singlestore-proxy.test.ts \
--exclude tests/singlestore/singlestore-prefixed.test.ts \
--exclude tests/singlestore/singlestore-custom.test.ts \
--exclude tests/pg/neon-http.test.ts \
--exclude tests/pg/neon-http-batch.test.ts \
--exclude tests/pg/neon-serverless.test.ts
;;
esac
attw:
strategy:
matrix:
package:
- drizzle-orm
- drizzle-kit
- drizzle-zod
- drizzle-seed
- drizzle-typebox
- drizzle-valibot
- drizzle-arktype
- eslint-plugin-drizzle
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: '18.18'
registry-url: 'https://registry.npmjs.org'
- uses: pnpm/action-setup@v3
name: Install pnpm
id: pnpm-install
with:
version: latest
run_install: false
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
name: Setup pnpm cache
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install
- name: Install Bun
uses: oven-sh/setup-bun@v2
- name: Check preconditions
id: checks
shell: bash
working-directory: ${{ matrix.package }}
run: |
latest="$(npm view --json ${{ matrix.package }} dist-tags.latest | jq -r)"
version="$(jq -r .version package.json)"
is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')"
if [[ "$is_version_published" == "false" && "$latest" != "$version" ]]; then
{
echo "version=$version"
echo "has_new_release=true"
} >> $GITHUB_OUTPUT
fi
- name: Build Prisma client
if: steps.checks.outputs.has_new_release == 'true'
working-directory: drizzle-orm
run: pnpm prisma generate --schema src/prisma/schema.prisma
- name: Build
if: steps.checks.outputs.has_new_release == 'true'
run: pnpm build
- name: Pack
if: steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
run: npm run pack
- name: Run @arethetypeswrong/cli
if: steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
run: bunx attw package.tgz
release:
permissions:
contents: write # for creating GitHub releases
id-token: write # for OIDC
needs:
- test
- attw
strategy:
fail-fast: false
matrix:
package:
- drizzle-orm
- drizzle-kit
- drizzle-zod
- drizzle-seed
- drizzle-typebox
- drizzle-valibot
- drizzle-arktype
- eslint-plugin-drizzle
runs-on: ubuntu-22.04
# force empty so npm can use OIDC
env:
NODE_AUTH_TOKEN: ""
NPM_TOKEN: ""
services:
postgres-postgis:
image: postgis/postgis:16-3.4
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 54322:5432
postgres-vector:
image: pgvector/pgvector:pg16
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 54321:5432
postgres:
image: postgres:14
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: drizzle
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 55433:5432
mysql:
image: mysql:8
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: drizzle
options: >-
--health-cmd "mysqladmin ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 33306:3306
steps:
- uses: actions/checkout@v5
- uses: pnpm/action-setup@v4
with: { run_install: false }
- uses: actions/setup-node@v6
with: { node-version: '24', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml }
- run: pnpm install --frozen-lockfile --prefer-offline
# >= 11.5.1 for trusted publishing
- name: Update NPM
run: npm install -g npm@latest
# nuke, so npm can use OIDC
- name: Remove temp npmrc
run: rm -f "$NPM_CONFIG_USERCONFIG"
- name: Check preconditions
id: checks
shell: bash
working-directory: ${{ matrix.package }}
run: |
latest="$(npm view --json ${{ matrix.package }} dist-tags.latest | jq -r)"
version="$(jq -r .version package.json)"
is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')"
if [[ "$is_version_published" == "true" ]]; then
echo "\`${{ matrix.package }}@$version\` already published, adding tag \`latest\`" >> $GITHUB_STEP_SUMMARY
elif [[ "$latest" != "$version" ]]; then
echo "Latest: $latest"
echo "Current: $version"
changelogPath=$(node -e "console.log(require('path').resolve('..', 'changelogs', '${{ matrix.package }}', '$version.md'))")
if [[ ! -f "$changelogPath" ]]; then
echo "::error::Changelog for version $version not found: $changelogPath"
exit 1
fi
{
echo "version=$version"
echo "has_new_release=true"
echo "changelog_path=$changelogPath"
} >> $GITHUB_OUTPUT
else
echo "Already up to date: $version"
echo "\`$version\` is already latest on NPM" >> $GITHUB_STEP_SUMMARY
fi
- name: Build Prisma client
if: steps.checks.outputs.has_new_release == 'true'
working-directory: drizzle-orm
run: pnpm prisma generate --schema src/prisma/schema.prisma
- name: Build
if: steps.checks.outputs.has_new_release == 'true'
run: pnpm build
- name: Pack
if: steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
shell: bash
run: npm run pack
- name: Publish
if: steps.checks.outputs.has_new_release == 'true'
working-directory: ${{ matrix.package }}
shell: bash
run: |
version="${{ steps.checks.outputs.version }}"
echo "Publishing ${{ matrix.package }}@$version"
npm run publish
echo "npm: \`+ ${{ matrix.package }}@$version\`" >> $GITHUB_STEP_SUMMARY
# Post release message to Discord
# curl -X POST -H "Content-Type: application/json" -d "{\"embeds\": [{\"title\": \"New \`${{ matrix.package }}\` release! 🎉\", \"url\": \"https://www.npmjs.com/package/${{ matrix.package }}\", \"color\": \"12907856\", \"fields\": [{\"name\": \"Tag\", \"value\": \"\`$tag\`\"}]}]}" ${{ secrets.DISCORD_RELEASE_WEBHOOK_URL }}
# Create the GitHub release for drizzle-orm. NOTE: the ORM release is tagged
# with the bare version (e.g. "0.30.0"), unlike drizzle-kit which uses a
# "drizzle-kit@<version>" tag — keep this asymmetry; existing tags depend on it.
- name: Create GitHub release for ORM package
  uses: actions/github-script@v6
  if: matrix.package == 'drizzle-orm' && steps.checks.outputs.has_new_release == 'true'
  with:
    github-token: ${{ secrets.GITHUB_TOKEN }}
    script: |
      try {
        const fs = require("fs");
        const path = require("path");
        const version = "${{ steps.checks.outputs.version }}";
        // Release body comes from changelogs/<package>/<version>.md,
        // resolved earlier by the "checks" step.
        const changelog = fs.readFileSync("${{ steps.checks.outputs.changelog_path }}", "utf8");
        const release = await github.rest.repos.createRelease({
          owner: context.repo.owner,
          repo: context.repo.repo,
          tag_name: `${version}`,
          name: `${version}`,
          body: changelog,
        });
        // Attach the tarball produced by the "Pack" step to the release.
        await github.rest.repos.uploadReleaseAsset({
          owner: context.repo.owner,
          repo: context.repo.repo,
          release_id: release.data.id,
          name: `${{ matrix.package }}-${version}-dist.tgz`,
          data: fs.readFileSync(path.resolve("${{ matrix.package }}", "package.tgz")),
        });
      } catch (e) {
        // Surface any API/file error as a step failure instead of silently passing.
        core.setFailed(e.message);
      }
# Create the GitHub release for drizzle-kit. The tag is namespaced as
# "drizzle-kit@<version>" so it cannot collide with the bare-version tags
# used by drizzle-orm releases.
- name: Create GitHub release for KIT package
  uses: actions/github-script@v6
  if: matrix.package == 'drizzle-kit' && steps.checks.outputs.has_new_release == 'true'
  with:
    github-token: ${{ secrets.GITHUB_TOKEN }}
    script: |
      try {
        const fs = require("fs");
        const path = require("path");
        const version = "${{ steps.checks.outputs.version }}";
        // Release body comes from changelogs/<package>/<version>.md,
        // resolved earlier by the "checks" step.
        const changelog = fs.readFileSync("${{ steps.checks.outputs.changelog_path }}", "utf8");
        const release = await github.rest.repos.createRelease({
          owner: context.repo.owner,
          repo: context.repo.repo,
          tag_name: `drizzle-kit@${version}`,
          name: `drizzle-kit@${version}`,
          body: changelog,
        });
        // Attach the tarball produced by the "Pack" step to the release.
        await github.rest.repos.uploadReleaseAsset({
          owner: context.repo.owner,
          repo: context.repo.repo,
          release_id: release.data.id,
          name: `${{ matrix.package }}-${version}-dist.tgz`,
          data: fs.readFileSync(path.resolve("${{ matrix.package }}", "package.tgz")),
        });
      } catch (e) {
        // Surface any API/file error as a step failure instead of silently passing.
        core.setFailed(e.message);
      }
================================================
FILE: .github/workflows/router.yaml
================================================
# Routes each triggering event to the appropriate release workflow:
#   - manual dispatch on main            -> "latest" release
#   - any push, or a PR from a fork      -> "feature" branch release
#   - anything else                      -> skipped
name: Release Router
on:
  push:
    branches-ignore:
      - main
  pull_request:
  workflow_dispatch:
jobs:
  switch:
    runs-on: ubuntu-24.04
    outputs:
      # One of: latest | feature | skip — consumed by the two downstream jobs.
      target: ${{ steps.route.outputs.target }}
    steps:
      - name: Route release
        id: route
        shell: bash
        run: |
          # Empty unless the event is a pull_request; used to detect fork PRs.
          HEAD_REPO="${{ github.event.pull_request.head.repo.full_name }}"
          if [[ "$GITHUB_EVENT_NAME" == "workflow_dispatch" && "${GITHUB_REF##*/}" == "main" ]]; then
            echo "target=latest" >> $GITHUB_OUTPUT
          # only run on all pushes or pull requests from forks
          elif [[ "$GITHUB_EVENT_NAME" == "push" ]] || [[ "$HEAD_REPO" != "$GITHUB_REPOSITORY" ]]; then
            echo "target=feature" >> $GITHUB_OUTPUT
          else
            echo "target=skip" >> $GITHUB_OUTPUT
          fi
  run-feature:
    needs: switch
    if: needs.switch.outputs.target == 'feature'
    uses: ./.github/workflows/release-feature-branch.yaml
    secrets:
      PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }}
      NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
      # NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
      TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }}
      XATA_API_KEY: ${{ secrets.XATA_API_KEY }}
      XATA_BRANCH: ${{ secrets.XATA_BRANCH }}
      LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }}
      LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }}
  run-latest:
    needs: switch
    if: needs.switch.outputs.target == 'latest'
    uses: ./.github/workflows/release-latest.yaml
    secrets:
      PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }}
      NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
      # NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }}
      TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }}
      XATA_API_KEY: ${{ secrets.XATA_API_KEY }}
      XATA_BRANCH: ${{ secrets.XATA_BRANCH }}
      LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }}
      LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }}
================================================
FILE: .github/workflows/unpublish-release-feature-branch.yaml
================================================
# Removes the per-branch NPM dist-tag for every published package when a
# feature branch is deleted (the inverse of the feature-branch release flow).
name: Unpublish release (feature branch)
on: delete
jobs:
  unpublish-release:
    # BUGFIX: for `delete` webhook events, `github.event.ref` is the SHORT ref
    # name (e.g. "my-branch"), never "refs/heads/my-branch". The previous
    # comparison against 'refs/heads/main'/'refs/heads/beta' could therefore
    # never match, so deleting `main` or `beta` would still run this job.
    # Compare against the bare branch names instead.
    if: github.event.ref_type == 'branch' && github.event.ref != 'main' && github.event.ref != 'beta'
    strategy:
      matrix:
        package:
          - drizzle-orm
          - drizzle-kit
          - drizzle-zod
          - drizzle-typebox
          - drizzle-valibot
          - drizzle-arktype
          - eslint-plugin-drizzle
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '22'
          registry-url: 'https://registry.npmjs.org'
      - name: Unpublish
        run: |
          tag="${{ github.event.ref }}"
          # Defensive: strip the prefix in case a fully-qualified ref ever arrives.
          tag="${tag#refs/heads/}"
          echo "Unpublishing ${{ matrix.package }}@$tag"
          npm dist-tag rm ${{ matrix.package }} $tag
          echo "npm: \`- ${{ matrix.package }}@$tag\`" >> $GITHUB_STEP_SUMMARY
        working-directory: ${{ matrix.package }}
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_ACCESS_TOKEN }}
================================================
FILE: .gitignore
================================================
node_modules
.vscode
dist
dist.new
*.tsbuildinfo
*.tgz
/*.sql
.cache
.turbo
.rollup.cache
dist-dts
rollup.config-*.mjs
*.log
.DS_Store
drizzle-seed/src/dev
================================================
FILE: .markdownlint.yaml
================================================
no-inline-html: false
first-line-h1: false
line-length: false
MD010:
spaces_per_tab: 2
================================================
FILE: .npmrc
================================================
# prefer-workspace-packages = true
================================================
FILE: .nvmrc
================================================
22
================================================
FILE: CODE_OF_CONDUCT.md
================================================
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
help@drizzle.team.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
================================================
FILE: CONTRIBUTING.md
================================================
# Contributing
Welcome! We're glad you're interested in Drizzle ORM and want to help us make it better.
Drizzle ORM is owned by [Drizzle Team](https://drizzle.team) and maintained by community members, mainly by our core contributors ([@AndriiSherman](https://github.com/AndriiSherman), [@AlexBlokh](https://github.com/AlexBlokh), [@dankochetov](https://github.com/dankochetov)). Everything that is going to be merged should be approved by all core contributors members.
---
There are many ways you can contribute to the Drizzle ORM project:
- [Submitting bug reports](#bug-report)
- [Submitting feature request](#feature-request)
- [Providing feedback](#feedback)
- [Contribution guidelines](#contribution-guidelines)
## Submitting bug report
To report a bug or issue, please use our [issue form](https://github.com/drizzle-team/drizzle-orm/issues/new/choose) and choose Bug Report.
## Submitting feature request
To request a feature, please use our [issue form](https://github.com/drizzle-team/drizzle-orm/issues/new/choose) and choose Feature Request.
## Providing feedback
There are several ways you can provide feedback:
- You can join our [Discord server](https://discord.gg/yfjTbVXMW4) and provide feedback there.
- You can add new ticket in [Discussions](https://github.com/drizzle-team/drizzle-orm/discussions).
- Mention our [Twitter account](https://twitter.com/DrizzleOrm).
## Contribution guidelines
- [Pre-contribution setup](#pre-contribution)
- [Installing Node](#installing-node)
- [Installing pnpm](#installing-pnpm)
- [Installing Docker](#installing-docker)
- [Cloning the repository](#cloning-the-repository)
- [Repository structure](#repository-structure)
- [Building the project](#building-the-project)
- [Commit message guidelines](#commit-message-guidelines)
- [Contributing to `drizzle-orm`](#contributing-orm)
- [Project structure](#project-structure-orm)
- [Running tests](#running-tests-orm)
- [PR guidelines](#pr-guidelines-orm)
- [Contributing to `drizzle-kit`](#contributing-kit)
- [Project structure](#project-structure-kit)
- [Running tests](#running-tests-kit)
- [PR guidelines](#pr-guidelines-kit)
## Pre-contribution setup
### Installing Node
```bash
# https://github.com/nvm-sh/nvm#install--update-script
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash
nvm install 22
nvm use 22
```
### Installing pnpm
```bash
# https://pnpm.io/installation
npm install -g pnpm
```
### Installing Docker
```bash
# https://docs.docker.com/get-docker/
# Use Docker's guide to install Docker for your OS.
```
### Cloning the repository
```bash
git clone https://github.com/drizzle-team/drizzle-orm.git
cd drizzle-orm
```
### Repository structure
- 📂 `drizzle-orm/`
orm core package with all main logic for each dialect
- 📂 `drizzle-kit/`
kit core package with all main logic and tests for each dialect
- 📂 `drizzle-typebox/`
all the code related to drizzle+typebox extension
- 📂 `drizzle-valibot/`
all the code related to drizzle+valibot extension
- 📂 `drizzle-zod/`
all the code related to drizzle+zod extension
- 📂 `eslint-plugin-drizzle/`
all the code related to drizzle eslint plugin
- 📂 `changelogs/`
all changelogs for drizzle-orm, drizzle-kit, drizzle-typebox, drizzle-zod, drizzle-valibot modules
- 📂 `examples/`
package with Drizzle ORM usage examples
- 📂 `integration-tests/`
package with all type of tests for each supported database
### Building the project
Run the following script from the root folder to build the whole monorepo. Running it from a specific package folder will only build that package.
```bash
pnpm install && pnpm build
```
## Commit message guidelines
We have specific rules on how commit messages should be structured.
It's important to make sure your commit messages are clear, concise, and informative to make it easier for others to understand the changes you are making.
All commit messages should follow the pattern below:
```
<subject>

<body>
```
Example:
```
Add groupBy error message
In specific case, groupBy was responding with unreadable error
...
```
> [!WARNING]
> All commits should be signed before submitting a PR. Please check the documentation on [how to sign commits](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification).
## Contributing to `drizzle-orm`
### Project structure
- 📂 `pg-core/`, `mysql-core/`, `sqlite-core/`
core packages for each dialect with all the main logic for relation and query builder
- 📂 `sql/`
package containing all expressions and SQL template implementation
- All other folders are for each specific driver that Drizzle ORM supports.
### Running tests
All tests for Drizzle ORM are integration tests that simulate real databases with different queries and responses from each database. Each file in `integration-tests` has a list of different scenarios for different dialects and drivers. Each file creates a Docker container with the needed database and runs the test cases there. After every test is run, the Docker container will be deleted.
If you have added additional logic to a core package, make sure that all tests completed without any failures.
> [!NOTE]
> If you have added data types or a feature for query building, you need to create additional test cases using the new API to ensure it works properly.
If you are in the root of the repository, run all integration tests with the following script:
```bash
cd integration-tests && pnpm test
```
### PR guidelines
1. PR titles should follow the pattern below:
```
[<Dialect>]: <subject>
```
Example:
```
[Pg] Add PostGIS extension support
```
2. PRs should contain a detailed description of everything that was changed.
3. Commit messages should follow the [message style guidelines](#commit-message-guidelines).
4. PRs should implement:
- Tests for features that were added.
- Tests for bugs that were fixed.
> [!NOTE]
> To understand how tests should be created and run, please check the [Running tests](#running-tests-orm) section.
## Contributing to `drizzle-kit`
### Project structure
- 📂 `cli/`
- 📄 `schema.ts`
all the commands defined using brocli
- 📂 `commands/`
all the business logic for drizzle-kit commands
- 📂 `extensions/`
all the extension helpers for databases
- 📂 `serializer/`
all the necessary logic to read from the Drizzle ORM schema and convert it to a common JSON format, as well as the logic to introspect all tables, types, and other database elements and convert them to a common JSON format
- 📄 `introspect-pg.ts`, `introspect-mysql.ts`, `introspect-sqlite.ts`
these files are responsible for mapping JSON snapshots to TypeScript files during introspect commands
- 📄 `snapshotsDiffer.ts`
this file handles the mapping from JSON snapshot format to JSON statement objects.
- 📄 `jsonStatements.ts`
this file defines JSON statement types, interfaces, and helper functions.
- 📄 `sqlgenerator.ts`
this file converts JSON statements to SQL strings.
### Running tests
All tests for Drizzle Kit are integration tests that simulate real databases with different queries and responses from each database. Each file in `drizzle-kit/tests` has a list of different scenarios for different commands. Each file creates a Docker container with the needed database and runs the test cases there. After every test is run, the Docker container will be deleted. We test MySQL, PostgreSQL (using PGlite), and SQLite.
If you are in the root of the repository, run all Drizzle Kit tests with the following script:
```bash
cd drizzle-kit && pnpm test
```
### PR guidelines
1. PR titles should follow the pattern below:
```
[<Dialect>-kit]: <subject>
```
Example:
```
[Pg-kit] Add PostGIS extension support
```
2. PRs should contain a detailed description of everything that was changed.
3. Commit messages should follow the [message style guidelines](#commit-message-guidelines).
4. PRs should implement:
- Tests for features that were added.
- Tests for bugs that were fixed.
> [!NOTE]
> To understand how tests should be created and run, please check the [Running tests](#running-tests-kit) section.
================================================
FILE: LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: README.md
================================================
### What's Drizzle?
Drizzle is a modern TypeScript ORM developers [wanna use in their next project](https://stateofdb.com/tools/drizzle).
It is [lightweight](https://bundlephobia.com/package/drizzle-orm) at only ~7.4kb minified+gzipped, and it's tree shakeable with exactly 0 dependencies.
**Drizzle supports every PostgreSQL, MySQL and SQLite database**, including serverless ones like [Turso](https://orm.drizzle.team/docs/get-started-sqlite#turso), [Neon](https://orm.drizzle.team/docs/get-started-postgresql#neon), [Xata](https://orm.drizzle.team/docs/connect-xata), [PlanetScale](https://orm.drizzle.team/docs/get-started-mysql#planetscale), [Cloudflare D1](https://orm.drizzle.team/docs/get-started-sqlite#cloudflare-d1), [FlyIO LiteFS](https://fly.io/docs/litefs/), [Vercel Postgres](https://orm.drizzle.team/docs/get-started-postgresql#vercel-postgres), [Supabase](https://orm.drizzle.team/docs/get-started-postgresql#supabase) and [AWS Data API](https://orm.drizzle.team/docs/get-started-postgresql#aws-data-api). No bells and whistles, no Rust binaries, no serverless adapters, everything just works out of the box.
**Drizzle is serverless-ready by design**. It works in every major JavaScript runtime like NodeJS, Bun, Deno, Cloudflare Workers, Supabase functions, any Edge runtime, and even in browsers.
With Drizzle you can be [**fast out of the box**](https://orm.drizzle.team/benchmarks) and save time and costs while never introducing any data proxies into your infrastructure.
While you can use Drizzle as a JavaScript library, it shines with TypeScript. It lets you [**declare SQL schemas**](https://orm.drizzle.team/docs/sql-schema-declaration) and build both [**relational**](https://orm.drizzle.team/docs/rqb) and [**SQL-like queries**](https://orm.drizzle.team/docs/select), while keeping the balance between type-safety and extensibility for toolmakers to build on top.
### Ecosystem
While Drizzle ORM remains a thin typed layer on top of SQL, we made a set of tools for people to have the best possible developer experience.
Drizzle comes with a powerful [**Drizzle Kit**](https://orm.drizzle.team/kit-docs/overview) CLI companion for you to have hassle-free migrations. It can generate SQL migration files for you or apply schema changes directly to the database.
We also have [**Drizzle Studio**](https://orm.drizzle.team/drizzle-studio/overview) for you to effortlessly browse and manipulate data in your database of choice.
### Documentation
Check out the full documentation on [the website](https://orm.drizzle.team/docs/overview).
### Our sponsors ❤️
================================================
FILE: SECURITY.md
================================================
# Security Policy
## Reporting a Vulnerability
If you have a security issue to report, please contact us at [security@drizzle.team](mailto:security@drizzle.team).
================================================
FILE: changelogs/README.md
================================================
# Release flow
- Push feature branch
- GitHub workflow publishes new feature tag to NPM
- Bump package versions manually
- (Optional) Create and merge PR to beta
- (Optional) GitHub workflow publishes new beta version to NPM
- Create PR to main
- TODO: GitHub workflow checks if changelog is present for every package version
- Resolve all conflicts, bump versions if necessary
- Merge PR
- GitHub workflow publishes new latest version to NPM and removes feature tag from NPM
================================================
FILE: changelogs/drizzle-arktype/0.1.2.md
================================================
`drizzle-arktype` is a plugin for [Drizzle ORM](https://github.com/drizzle-team/drizzle-orm) that allows you to generate [arktype](https://arktype.io/) schemas from Drizzle ORM schemas.
**Features**
- Create a select schema for tables, views and enums.
- Create insert and update schemas for tables.
- Supports all dialects: PostgreSQL, MySQL and SQLite.
# Usage
```ts
import { pgEnum, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core';
import { createInsertSchema, createSelectSchema } from 'drizzle-arktype';
import { type } from 'arktype';
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
email: text('email').notNull(),
role: text('role', { enum: ['admin', 'user'] }).notNull(),
createdAt: timestamp('created_at').notNull().defaultNow(),
});
// Schema for inserting a user - can be used to validate API requests
const insertUserSchema = createInsertSchema(users);
// Schema for updating a user - can be used to validate API requests
const updateUserSchema = createUpdateSchema(users);
// Schema for selecting a user - can be used to validate API responses
const selectUserSchema = createSelectSchema(users);
// Overriding the fields
const insertUserSchema = createInsertSchema(users, {
role: type('string'),
});
// Refining the fields - useful if you want to change the fields before they become nullable/optional in the final schema
const insertUserSchema = createInsertSchema(users, {
id: (schema) => schema.atLeast(1),
role: type('string'),
});
// Usage
const isUserValid = parse(insertUserSchema, {
name: 'John Doe',
email: 'johndoe@test.com',
role: 'admin',
});
```
thanks @L-Mario564
================================================
FILE: changelogs/drizzle-arktype/0.1.3.md
================================================
- TS language server performance improvements
- Fixed [Buffer is not defined using drizzle-arktype client side with vite](https://github.com/drizzle-team/drizzle-orm/issues/4383)
- Fixed [[BUG]: drizzle-arktype Buffer is undefined](https://github.com/drizzle-team/drizzle-orm/issues/4371)
================================================
FILE: changelogs/drizzle-kit/0.23.2.md
================================================
- Fixed a bug in PostgreSQL with push and introspect where the `schemaFilter` object was passed. It was detecting enums even in schemas that were not defined in the schemaFilter.
- Fixed the `drizzle-kit up` command to work as expected, starting from the sequences release.
================================================
FILE: changelogs/drizzle-kit/0.24.0.md
================================================
## Breaking changes (for SQLite users)
#### Fixed [Composite primary key order is not consistent](https://github.com/drizzle-team/drizzle-kit-mirror/issues/342) by removing `sort` in SQLite, making it consistent with the same logic in PostgreSQL and MySQL
The issue that may arise for SQLite users with any driver using composite primary keys is that the order in the database may differ from the Drizzle schema.
- If you are using `push`, you **MAY** be prompted to update your table with a new order of columns in the composite primary key. You will need to either change it manually in the database or push the changes, but this may lead to data loss, etc.
- If you are using `generate`, you **MAY** also be prompted to update your table with a new order of columns in the composite primary key. You can either keep that migration or skip it by emptying the SQL migration file.
If nothing works for you and you are blocked, please reach out to me @AndriiSherman. I will try to help you!
## Bug fixes
- [[BUG] When using double type columns, import is not inserted](https://github.com/drizzle-team/drizzle-kit-mirror/issues/403) - thanks @Karibash
- [[BUG] A number value is specified as the default for a column of type char](https://github.com/drizzle-team/drizzle-kit-mirror/issues/404) - thanks @Karibash
- [[BUG]: Array default in migrations are wrong](https://github.com/drizzle-team/drizzle-orm/issues/2621) - thanks @L-Mario564
- [[FEATURE]: Simpler default array fields](https://github.com/drizzle-team/drizzle-orm/issues/2709) - thanks @L-Mario564
- [[BUG]: drizzle-kit generate succeeds but generates invalid SQL for default([]) - Postgres](https://github.com/drizzle-team/drizzle-orm/issues/2432) - thanks @L-Mario564
- [[BUG]: Incorrect type for array column default value](https://github.com/drizzle-team/drizzle-orm/issues/2334) - thanks @L-Mario564
- [[BUG]: error: column is of type integer[] but default expression is of type integer](https://github.com/drizzle-team/drizzle-orm/issues/2224) - thanks @L-Mario564
- [[BUG]: Default value in array generating wrong migration file](https://github.com/drizzle-team/drizzle-orm/issues/1003) - thanks @L-Mario564
- [[BUG]: enum as array, not possible?](https://github.com/drizzle-team/drizzle-orm/issues/1564) - thanks @L-Mario564
================================================
FILE: changelogs/drizzle-kit/0.24.1.md
================================================
## Bug fixes
> Big thanks to @L-Mario564 for his [PR](https://github.com/drizzle-team/drizzle-orm/pull/2804). It conflicted in most cases with a PR that was merged, but we incorporated some of his logic. Merging it would have caused more problems and taken more time to resolve, so we just took a few things from his PR, like removing "::" mappings in introspect and some array type default handlers
### What was fixed
1. The Drizzle Kit CLI was not working properly for the `introspect` command.
2. Added the ability to use column names with special characters for all dialects.
3. Included PostgreSQL sequences in the introspection process.
4. Reworked array type introspection and added all test cases.
5. Fixed all (we hope) default issues in PostgreSQL, where `::` was included in the introspected output.
6. `preserve` casing option was broken
### Tickets that were closed
- [[BUG]: invalid schema generation with drizzle-kit introspect:pg](https://github.com/drizzle-team/drizzle-orm/issues/1210)
- [[BUG][mysql introspection]: TS error when introspect column including colon](https://github.com/drizzle-team/drizzle-orm/issues/1928)
- [[BUG]: Unhandled defaults when introspecting postgres db](https://github.com/drizzle-team/drizzle-orm/issues/1625)
- [[BUG]: PostgreSQL Enum Naming and Schema Typing Issue](https://github.com/drizzle-team/drizzle-orm/issues/2315)
- [[BUG]: drizzle-kit instrospect command generates syntax error on varchar column types](https://github.com/drizzle-team/drizzle-orm/issues/2714)
- [[BUG]: Introspecting varchar[] type produces syntactically invalid schema.ts](https://github.com/drizzle-team/drizzle-orm/issues/1633)
- [[BUG]: introspect:pg column not using generated enum name](https://github.com/drizzle-team/drizzle-orm/issues/1648)
- [[BUG]: drizzle-kit introspect casing "preserve" config not working](https://github.com/drizzle-team/drizzle-orm/issues/2773)
- [[BUG]: drizzle-kit introspect fails on required param that is defined](https://github.com/drizzle-team/drizzle-orm/issues/2719)
- [[BUG]: Error when running npx drizzle-kit introspect: "Expected object, received string"](https://github.com/drizzle-team/drizzle-orm/issues/2657)
- [[BUG]: Missing index names when running introspect command [MYSQL]](https://github.com/drizzle-team/drizzle-orm/issues/2525)
- [[BUG]: drizzle-kit introspect TypeError: Cannot read properties of undefined (reading 'toLowerCase')](https://github.com/drizzle-team/drizzle-orm/issues/2338)
- [[BUG]: Wrong column name when using PgEnum.array()](https://github.com/drizzle-team/drizzle-orm/issues/2100)
- [[BUG]: Incorrect Schema Generated when introspecting extisting pg database](https://github.com/drizzle-team/drizzle-orm/issues/1985)
- [[⚠️🐞BUG]: index() missing argument after introspection, causes tsc error that fails the build](https://github.com/drizzle-team/drizzle-orm/issues/1870)
- [[BUG]: drizzle-kit introspect small errors](https://github.com/drizzle-team/drizzle-orm/issues/1738)
- [[BUG]: Missing bigint import in drizzle-kit introspect](https://github.com/drizzle-team/drizzle-orm/issues/1020)
================================================
FILE: changelogs/drizzle-kit/0.24.2.md
================================================
## New Features
### 🎉 Support for `pglite` driver
You can now use pglite with all drizzle-kit commands, including Drizzle Studio!
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "postgresql",
driver: "pglite",
schema: "./schema.ts",
dbCredentials: {
url: "local-pg.db",
},
verbose: true,
strict: true,
});
```
## Bug fixes
- mysql-kit: fix GENERATED ALWAYS AS ... NOT NULL - [#2824](https://github.com/drizzle-team/drizzle-orm/pull/2824)
================================================
FILE: changelogs/drizzle-kit/0.25.0.md
================================================
## Breaking changes and migrate guide for Turso users
If you are using Turso and libsql, you will need to upgrade your `drizzle.config` and `@libsql/client` package.
1. This version of drizzle-orm will only work with `@libsql/client@0.10.0` or higher if you are using the `migrate` function. For other use cases, you can continue using previous versions (but we suggest upgrading).
To install the latest version, use the command:
```bash
npm i @libsql/client@latest
```
2. Previously, we had a common `drizzle.config` for SQLite and Turso users, which allowed a shared strategy for both dialects. Starting with this release, we are introducing the turso dialect in drizzle-kit. We will evolve and improve Turso as a separate dialect with its own migration strategies.
**Before**
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "sqlite",
schema: "./schema.ts",
out: "./drizzle",
dbCredentials: {
url: "database.db",
},
breakpoints: true,
verbose: true,
strict: true,
});
```
**After**
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "turso",
schema: "./schema.ts",
out: "./drizzle",
dbCredentials: {
url: "database.db",
},
breakpoints: true,
verbose: true,
strict: true,
});
```
If you are using only SQLite, you can use `dialect: "sqlite"`
## LibSQL/Turso and Sqlite migration updates
### SQLite "generate" and "push" statements updates
Starting from this release, we will no longer generate comments like this:
```sql
'/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually'
+ '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
+ '\n https://www.sqlite.org/lang_altertable.html'
+ '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
+ "\n\n Due to that we don't generate migration automatically and it has to be done manually"
+ '\n*/'
```
We will generate a set of statements, and you can decide if it's appropriate to create data-moving statements instead. Here is an example of the SQL file you'll receive now:
```sql
PRAGMA foreign_keys=OFF;
--> statement-breakpoint
CREATE TABLE `__new_worker` (
`id` integer PRIMARY KEY NOT NULL,
`name` text NOT NULL,
`salary` text NOT NULL,
`job_id` integer,
FOREIGN KEY (`job_id`) REFERENCES `job`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
INSERT INTO `__new_worker`("id", "name", "salary", "job_id") SELECT "id", "name", "salary", "job_id" FROM `worker`;
--> statement-breakpoint
DROP TABLE `worker`;
--> statement-breakpoint
ALTER TABLE `__new_worker` RENAME TO `worker`;
--> statement-breakpoint
PRAGMA foreign_keys=ON;
```
### LibSQL/Turso "generate" and "push" statements updates
Since LibSQL supports more ALTER statements than SQLite, we can generate more statements without recreating your schema and moving all the data, which can be potentially dangerous for production environments.
LibSQL and Turso will now have a separate dialect in the Drizzle config file, meaning that we will evolve Turso and LibSQL independently from SQLite and will aim to support as many features as Turso/LibSQL offer.
With the updated LibSQL migration strategy, you will have the ability to:
- **Change Data Type**: Set a new data type for existing columns.
- **Set and Drop Default Values**: Add or remove default values for existing columns.
- **Set and Drop NOT NULL**: Add or remove the NOT NULL constraint on existing columns.
- **Add References to Existing Columns**: Add foreign key references to existing columns
You can find more information in the [LibSQL documentation](https://github.com/tursodatabase/libsql/blob/main/libsql-sqlite3/doc/libsql_extensions.md#altering-columns)
### LIMITATIONS
- Dropping or altering an index will cause table recreation.
This is because LibSQL/Turso does not support dropping this type of index.
```sql
CREATE TABLE `users` (
`id` integer NOT NULL,
`name` integer,
`age` integer PRIMARY KEY NOT NULL
FOREIGN KEY (`name`) REFERENCES `users1`("id") ON UPDATE no action ON DELETE no action
);
```
- If the table has indexes, altering columns will cause table recreation.
- Drizzle-Kit will drop the indexes, modify the columns, and then recreate the indexes.
- Adding or dropping composite foreign keys is not supported and will cause table recreation
### NOTES
- You can create a reference on any column type, but if you want to insert values, the referenced column must have a unique index or primary key.
```sql
CREATE TABLE parent(a PRIMARY KEY, b UNIQUE, c, d, e, f);
CREATE UNIQUE INDEX i1 ON parent(c, d);
CREATE INDEX i2 ON parent(e);
CREATE UNIQUE INDEX i3 ON parent(f COLLATE nocase);
CREATE TABLE child1(f, g REFERENCES parent(a)); -- Ok
CREATE TABLE child2(h, i REFERENCES parent(b)); -- Ok
CREATE TABLE child3(j, k, FOREIGN KEY(j, k) REFERENCES parent(c, d)); -- Ok
CREATE TABLE child4(l, m REFERENCES parent(e)); -- Error!
CREATE TABLE child5(n, o REFERENCES parent(f)); -- Error!
CREATE TABLE child6(p, q, FOREIGN KEY(p, q) REFERENCES parent(b, c)); -- Error!
CREATE TABLE child7(r REFERENCES parent(c)); -- Error!
```
> **NOTE**: The foreign key for the table child5 is an error because, although the parent key column has a unique index, the index uses a different collating sequence.
See more: https://www.sqlite.org/foreignkeys.html
## New `casing` param in `drizzle-orm` and `drizzle-kit`
There are more improvements you can make to your schema definition. The most common way to name your variables in a database and in TypeScript code is usually `snake_case` in the database and `camelCase` in the code. For this case, in Drizzle, you can now define a naming strategy in your database to help Drizzle map column keys automatically. Let's take a table from the previous example and make it work with the new casing API in Drizzle
Table can now become:
```ts
import { pgTable } from "drizzle-orm/pg-core";
export const ingredients = pgTable("ingredients", (t) => ({
id: t.uuid().defaultRandom().primaryKey(),
name: t.text().notNull(),
description: t.text(),
inStock: t.boolean().default(true),
}));
```
As you can see, `inStock` doesn't have a database name alias, but by defining the casing configuration at the connection level, all queries will automatically map it to `snake_case`
```ts
const db = await drizzle('node-postgres', { connection: '', casing: 'snake_case' })
```
For `drizzle-kit` migrations generation you should also specify the `casing` param in your drizzle config, so you can be sure your casing strategy will be applied to drizzle-kit as well
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "postgresql",
schema: "./schema.ts",
dbCredentials: {
url: "postgresql://postgres:password@localhost:5432/db",
},
casing: "snake_case",
});
```
================================================
FILE: changelogs/drizzle-kit/0.26.0.md
================================================
# New Features
## Checks support in `drizzle-kit`
You can use drizzle-kit to manage your `check` constraint defined in drizzle-orm schema definition
For example current drizzle table:
```ts
import { sql } from "drizzle-orm";
import { check, pgTable } from "drizzle-orm/pg-core";
export const users = pgTable(
"users",
(c) => ({
id: c.uuid().defaultRandom().primaryKey(),
username: c.text().notNull(),
age: c.integer(),
}),
(table) => ({
checkConstraint: check("age_check", sql`${table.age} > 21`),
})
);
```
will be generated into
```sql
CREATE TABLE IF NOT EXISTS "users" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"username" text NOT NULL,
"age" integer,
CONSTRAINT "age_check" CHECK ("users"."age" > 21)
);
```
The same is supported in all dialects
### Limitations
- `generate` will work as expected for all check constraint changes.
- `push` will detect only check renames and will recreate the constraint. All other changes to SQL won't be detected and will be ignored.
So, if you want to change the constraint's SQL definition using only `push`, you would need to manually comment out the constraint, `push`, then put it back with the new SQL definition and `push` one more time.
## Views support in `drizzle-kit`
You can use drizzle-kit to manage your `views` defined in drizzle-orm schema definition. It will work with all existing dialects and view options
### PostgreSQL
For example current drizzle table:
```ts
import { sql } from "drizzle-orm";
import {
check,
pgMaterializedView,
pgTable,
pgView,
} from "drizzle-orm/pg-core";
export const users = pgTable(
"users",
(c) => ({
id: c.uuid().defaultRandom().primaryKey(),
username: c.text().notNull(),
age: c.integer(),
}),
(table) => ({
checkConstraint: check("age_check", sql`${table.age} > 21`),
})
);
export const simpleView = pgView("simple_users_view").as((qb) =>
qb.select().from(users)
);
export const materializedView = pgMaterializedView(
"materialized_users_view"
).as((qb) => qb.select().from(users));
```
will be generated into
```sql
CREATE TABLE IF NOT EXISTS "users" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"username" text NOT NULL,
"age" integer,
CONSTRAINT "age_check" CHECK ("users"."age" > 21)
);
CREATE VIEW "public"."simple_users_view" AS (select "id", "username", "age" from "users");
CREATE MATERIALIZED VIEW "public"."materialized_users_view" AS (select "id", "username", "age" from "users");
```
Views supported in all dialects, but materialized views are supported only in PostgreSQL
#### Limitations
- `generate` will work as expected for all view changes
- `push` limitations:
1. If you want to change the view's SQL definition using only `push`, you would need to manually comment out the view, `push`, then put it back with the new SQL definition and `push` one more time.
## Updates for PostgreSQL enums behavior
We've updated enum behavior in Drizzle with PostgreSQL:
- Add value after or before in enum: With this change, Drizzle will now respect the order of values in the enum and allow adding new values after or before a specific one.
- Support for dropping a value from an enum: In this case, Drizzle will attempt to alter all columns using the enum to text, then drop the existing enum and create a new one with the updated set of values. After that, all columns previously using the enum will be altered back to the new enum.
> If the deleted enum value was used by a column, this process will result in a database error.
- Support for dropping an enum
- Support for moving enums between schemas
- Support for renaming enums
================================================
FILE: changelogs/drizzle-kit/0.26.1.md
================================================
- Fix `data is malformed` for views
================================================
FILE: changelogs/drizzle-kit/0.26.2.md
================================================
- Updated internal versions for the drizzle-kit and drizzle-orm packages. Changes were introduced in the last minor release, and you are required to upgrade both packages to ensure they work as expected
================================================
FILE: changelogs/drizzle-kit/0.27.0.md
================================================
> This version of `drizzle-kit` requires `drizzle-orm@0.36.0` to enable all new features
# New Features
## Row-Level Security (RLS)
With Drizzle, you can enable Row-Level Security (RLS) for any Postgres table, create policies with various options, and define and manage the roles those policies apply to.
Drizzle supports a raw representation of Postgres policies and roles that can be used in any way you want. This works with popular Postgres database providers such as `Neon` and `Supabase`.
In Drizzle, we have specific predefined RLS roles and functions for RLS with both database providers, but you can also define your own logic.
### Enable RLS
If you just want to enable RLS on a table without adding policies, you can use `.enableRLS()`
As mentioned in the PostgreSQL documentation:
> If no policy exists for the table, a default-deny policy is used, meaning that no rows are visible or can be modified.
Operations that apply to the whole table, such as TRUNCATE and REFERENCES, are not subject to row security.
```ts
import { integer, pgTable } from 'drizzle-orm/pg-core';
export const users = pgTable('users', {
id: integer(),
}).enableRLS();
```
> If you add a policy to a table, RLS will be enabled automatically. So, there’s no need to explicitly enable RLS when adding policies to a table.
### Roles
Currently, Drizzle supports defining roles with a few different options, as shown below. Support for more options will be added in a future release.
```ts
import { pgRole } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin', { createRole: true, createDb: true, inherit: true });
```
If a role already exists in your database, and you don’t want drizzle-kit to ‘see’ it or include it in migrations, you can mark the role as existing.
```ts
import { pgRole } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin').existing();
```
### Policies
To fully leverage RLS, you can define policies within a Drizzle table.
> In PostgreSQL, policies should be linked to an existing table. Since policies are always associated with a specific table, we decided that policy definitions should be defined as a parameter of `pgTable`
**Example of pgPolicy with all available properties**
```ts
import { sql } from 'drizzle-orm';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy('policy', {
as: 'permissive',
to: admin,
for: 'delete',
using: sql``,
withCheck: sql``,
}),
]);
```
**Link Policy to an existing table**
There are situations where you need to link a policy to an existing table in your database.
The most common use case is with database providers like `Neon` or `Supabase`, where you need to add a policy
to their existing tables. In this case, you can use the `.link()` API
```ts
import { sql } from "drizzle-orm";
import { pgPolicy } from "drizzle-orm/pg-core";
import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase";
export const policy = pgPolicy("authenticated role insert policy", {
for: "insert",
to: authenticatedRole,
using: sql``,
}).link(realtimeMessages);
```
### Migrations
If you are using drizzle-kit to manage your schema and roles, there may be situations where you want to refer to roles that are not defined in your Drizzle schema. In such cases, you may want drizzle-kit to skip managing these roles without having to define each role in your drizzle schema and marking it with `.existing()`.
In these cases, you can use `entities.roles` in `drizzle.config.ts`. For a complete reference, refer to the [`drizzle.config.ts`](https://orm.drizzle.team/docs/drizzle-config-file) documentation.
By default, `drizzle-kit` does not manage roles for you, so you will need to enable this feature in `drizzle.config.ts`.
```ts {12-14}
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: 'postgresql',
schema: "./drizzle/schema.ts",
dbCredentials: {
url: process.env.DATABASE_URL!
},
verbose: true,
strict: true,
entities: {
roles: true
}
});
```
In case you need additional configuration options, let's take a look at a few more examples.
**You have an `admin` role and want to exclude it from the list of manageable roles**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
exclude: ['admin']
}
}
});
```
**You have an `admin` role and want to include it in the list of manageable roles**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
include: ['admin']
}
}
});
```
**If you are using `Neon` and want to exclude Neon-defined roles, you can use the provider option**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
provider: 'neon'
}
}
});
```
**If you are using `Supabase` and want to exclude Supabase-defined roles, you can use the provider option**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
provider: 'supabase'
}
}
});
```
> You may encounter situations where Drizzle is slightly outdated compared to new roles specified by your database provider.
In such cases, you can use the `provider` option and `exclude` additional roles:
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
provider: 'supabase',
exclude: ['new_supabase_role']
}
}
});
```
### RLS on views
With Drizzle, you can also specify RLS policies on views. For this, you need to use `security_invoker` in the view's WITH options. Here is a small example:
```ts {5}
...
export const roomsUsersProfiles = pgView("rooms_users_profiles")
.with({
securityInvoker: true,
})
.as((qb) =>
qb
.select({
...getTableColumns(roomsUsers),
email: profiles.email,
})
.from(roomsUsers)
.innerJoin(profiles, eq(roomsUsers.userId, profiles.id))
);
```
### Using with Neon
The Neon Team helped us implement their vision of a wrapper on top of our raw policies API. We defined a specific
`/neon` import with the `crudPolicy` function that includes predefined functions and Neon's default roles.
Here's an example of how to use the `crudPolicy` function:
```ts
import { crudPolicy } from 'drizzle-orm/neon';
import { integer, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
crudPolicy({ role: admin, read: true, modify: false }),
]);
```
This policy is equivalent to:
```ts
import { sql } from 'drizzle-orm';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy(`crud-${admin.name}-policy-insert`, {
for: 'insert',
to: admin,
withCheck: sql`false`,
}),
pgPolicy(`crud-${admin.name}-policy-update`, {
for: 'update',
to: admin,
using: sql`false`,
withCheck: sql`false`,
}),
pgPolicy(`crud-${admin.name}-policy-delete`, {
for: 'delete',
to: admin,
using: sql`false`,
}),
pgPolicy(`crud-${admin.name}-policy-select`, {
for: 'select',
to: admin,
using: sql`true`,
}),
]);
```
`Neon` exposes predefined `authenticated` and `anonymous` roles and related functions. If you are using `Neon` for RLS, you can use these roles, which are marked as existing, and the related functions in your RLS queries.
```ts
// drizzle-orm/neon
export const authenticatedRole = pgRole('authenticated').existing();
export const anonymousRole = pgRole('anonymous').existing();
export const authUid = (userIdColumn: AnyPgColumn) => sql`(select auth.user_id() = ${userIdColumn})`;
```
For example, you can use the `Neon` predefined roles and functions like this:
```ts
import { sql } from 'drizzle-orm';
import { authenticatedRole } from 'drizzle-orm/neon';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy(`policy-insert`, {
for: 'insert',
to: authenticatedRole,
withCheck: sql`false`,
}),
]);
```
### Using with Supabase
We also have a `/supabase` import with a set of predefined roles marked as existing, which you can use in your schema.
This import will be extended in a future release with more functions and helpers to make using RLS and `Supabase` simpler.
```ts
// drizzle-orm/supabase
export const anonRole = pgRole('anon').existing();
export const authenticatedRole = pgRole('authenticated').existing();
export const serviceRole = pgRole('service_role').existing();
export const postgresRole = pgRole('postgres_role').existing();
export const supabaseAuthAdminRole = pgRole('supabase_auth_admin').existing();
```
For example, you can use the `Supabase` predefined roles like this:
```ts
import { sql } from 'drizzle-orm';
import { serviceRole } from 'drizzle-orm/supabase';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy(`policy-insert`, {
for: 'insert',
to: serviceRole,
withCheck: sql`false`,
}),
]);
```
The `/supabase` import also includes predefined tables and functions that you can use in your application
```ts
// drizzle-orm/supabase
const auth = pgSchema('auth');
export const authUsers = auth.table('users', {
id: uuid().primaryKey().notNull(),
});
const realtime = pgSchema('realtime');
export const realtimeMessages = realtime.table(
'messages',
{
id: bigserial({ mode: 'bigint' }).primaryKey(),
topic: text().notNull(),
extension: text({
enum: ['presence', 'broadcast', 'postgres_changes'],
}).notNull(),
},
);
export const authUid = sql`(select auth.uid())`;
export const realtimeTopic = sql`realtime.topic()`;
```
This allows you to use it in your code, and Drizzle Kit will treat them as existing databases,
using them only as information to connect to other entities
```ts
import { foreignKey, pgPolicy, pgTable, text, uuid } from "drizzle-orm/pg-core";
import { sql } from "drizzle-orm/sql";
import { authenticatedRole, authUsers } from "drizzle-orm/supabase";
export const profiles = pgTable(
"profiles",
{
id: uuid().primaryKey().notNull(),
email: text().notNull(),
},
(table) => [
foreignKey({
columns: [table.id],
// reference to the auth table from Supabase
foreignColumns: [authUsers.id],
name: "profiles_id_fk",
}).onDelete("cascade"),
pgPolicy("authenticated can view all profiles", {
for: "select",
// using predefined role from Supabase
to: authenticatedRole,
using: sql`true`,
}),
]
);
```
Let's check an example of adding a policy to a table that exists in `Supabase`
```ts
import { sql } from "drizzle-orm";
import { pgPolicy } from "drizzle-orm/pg-core";
import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase";
export const policy = pgPolicy("authenticated role insert policy", {
for: "insert",
to: authenticatedRole,
using: sql``,
}).link(realtimeMessages);
```
# Bug fixes
- [[BUG]: Studio + mysql default mode, wrong format related timezone](https://github.com/drizzle-team/drizzle-orm/issues/2747)
- [[BUG]: Drizzle Studio CORS error](https://github.com/drizzle-team/drizzle-orm/issues/1857)
- [[BUG]: TIMESTAMPS showing up incorrectly on drizzle studio](https://github.com/drizzle-team/drizzle-orm/issues/2549)
================================================
FILE: changelogs/drizzle-kit/0.27.1.md
================================================
- Fix: [[BUG]: When using RLS policies and Views, the view is the last clause generated](https://github.com/drizzle-team/drizzle-orm/issues/3378)
================================================
FILE: changelogs/drizzle-kit/0.27.2.md
================================================
- Fix [[BUG]: Undefined properties when using drizzle-kit push](https://github.com/drizzle-team/drizzle-orm/issues/3391)
- Fix TypeError: Cannot read properties of undefined (reading 'isRLSEnabled')
- Fix push bugs, when pushing a schema with linked policy to a table from `drizzle-orm/supabase`
================================================
FILE: changelogs/drizzle-kit/0.28.0.md
================================================
# Improvements
- Added an OHM static imports checker to identify unexpected imports within a chain of imports in the drizzle-kit repo. For example, it checks if drizzle-orm is imported before drizzle-kit and verifies if the drizzle-orm import is available in your project.
- [Adding more columns to Supabase auth.users table schema](https://github.com/drizzle-team/drizzle-orm/issues/3327) - thanks @nicholasdly
# Bug Fixes
- [[BUG]: [drizzle-kit]: Fix breakpoints option cannot be disabled](https://github.com/drizzle-team/drizzle-orm/issues/2828) - thanks @klotztech
- [[BUG]: drizzle-kit introspect: SMALLINT import missing and incorrect DECIMAL UNSIGNED handling](https://github.com/drizzle-team/drizzle-orm/issues/2950) - thanks @L-Mario564
- [Unsigned tinyints preventing migrations](https://github.com/drizzle-team/drizzle-orm/issues/1571) - thanks @L-Mario564
- [[BUG]: Can't parse float(8,2) from database (precision and scale and/or unsigned breaks float types)](https://github.com/drizzle-team/drizzle-orm/issues/3285) - thanks @L-Mario564
- [[BUG]: PgEnum generated migration doesn't escape single quotes](https://github.com/drizzle-team/drizzle-orm/issues/1272) - thanks @L-Mario564
- [[BUG]: single quote not escaped correctly in migration file](https://github.com/drizzle-team/drizzle-orm/issues/2184) - thanks @L-Mario564
- [[BUG]: Migrations does not escape single quotes](https://github.com/drizzle-team/drizzle-orm/issues/1765) - thanks @L-Mario564
- [[BUG]: Issue with quoted default string values](https://github.com/drizzle-team/drizzle-orm/issues/2122) - thanks @L-Mario564
- [[BUG]: SQl commands in wrong roder](https://github.com/drizzle-team/drizzle-orm/issues/2390) - thanks @L-Mario564
- [[BUG]: Time with precision in drizzle-orm/pg-core adds double-quotes around type](https://github.com/drizzle-team/drizzle-orm/issues/1804) - thanks @L-Mario564
- [[BUG]: Postgres push fails due to lack of quotes](https://github.com/drizzle-team/drizzle-orm/issues/2396) - thanks @L-Mario564
- [[BUG]: TypeError: Cannot read properties of undefined (reading 'compositePrimaryKeys')](https://github.com/drizzle-team/drizzle-orm/issues/2344) - thanks @L-Mario564
- [[BUG]: drizzle-kit introspect generates CURRENT_TIMESTAMP without sql operator on date column](https://github.com/drizzle-team/drizzle-orm/issues/2899) - thanks @L-Mario564
- [[BUG]: Drizzle-kit introspect doesn't pull correct defautl statement](https://github.com/drizzle-team/drizzle-orm/issues/2905) - thanks @L-Mario564
- [[BUG]: Problem on MacBook - This statement does not return data. Use run() instead](https://github.com/drizzle-team/drizzle-orm/issues/2623) - thanks @L-Mario564
- [[BUG]: Enum column names that are used as arrays are not quoted](https://github.com/drizzle-team/drizzle-orm/issues/2598) - thanks @L-Mario564
- [[BUG]: drizzle-kit generate ignores index operators](https://github.com/drizzle-team/drizzle-orm/issues/2935) - thanks @L-Mario564
- [dialect param config error message is wrong](https://github.com/drizzle-team/drizzle-orm/issues/3427) - thanks @L-Mario564
- [[BUG]: Error setting default enum field values](https://github.com/drizzle-team/drizzle-orm/issues/2299) - thanks @L-Mario564
- [[BUG]: drizzle-kit does not respect the order of columns configured in primaryKey()](https://github.com/drizzle-team/drizzle-orm/issues/2326) - thanks @L-Mario564
- [[BUG]: Cannot drop Unique Constraint MySQL](https://github.com/drizzle-team/drizzle-orm/issues/998) - thanks @L-Mario564
================================================
FILE: changelogs/drizzle-kit/0.28.1.md
================================================
# Bug fixes
- Fixed typos in repository: thanks @armandsalle, @masto, @wackbyte, @Asher-JH, @MaxLeiter
- [fix: wrong dialect set in mysql/sqlite introspect](https://github.com/drizzle-team/drizzle-orm/pull/2865)
================================================
FILE: changelogs/drizzle-kit/0.29.0.md
================================================
# New Dialects
### 🎉 `SingleStore` dialect is now available in Drizzle
Thanks to the SingleStore team for creating a PR with all the necessary changes to support the MySQL-compatible part of SingleStore. You can already start using it with Drizzle. The SingleStore team will also help us iterate through updates and make more SingleStore-specific features available in Drizzle
```ts
import 'dotenv/config';
import { defineConfig } from 'drizzle-kit';
export default defineConfig({
dialect: 'singlestore',
out: './drizzle',
schema: './src/db/schema.ts',
dbCredentials: {
url: process.env.DATABASE_URL!,
},
});
```
You can check out our [Getting started guides](https://orm.drizzle.team/docs/get-started/singlestore-new) to try SingleStore!
# New Drivers
### 🎉 `SQLite Durable Objects` driver is now available in Drizzle
You can now query SQLite Durable Objects in Drizzle!
For the full example, please check our [Get Started](https://orm.drizzle.team/docs/get-started/do-new) Section
```ts
import 'dotenv/config';
import { defineConfig } from 'drizzle-kit';
export default defineConfig({
out: './drizzle',
schema: './src/db/schema.ts',
dialect: 'sqlite',
driver: 'durable-sqlite',
});
```
================================================
FILE: changelogs/drizzle-kit/0.29.1.md
================================================
- Fix SingleStore generate migrations command
================================================
FILE: changelogs/drizzle-kit/0.30.0.md
================================================
Starting from this update, the PostgreSQL dialect will align with the behavior of all other dialects. It will no longer include `IF NOT EXISTS`, `$DO`, or similar statements, which could cause incorrect DDL statements to not fail when an object already exists in the database and should actually fail.
This change marks our first step toward several major upgrades we are preparing:
- An updated and improved migration workflow featuring commutative migrations, a revised folder structure, and enhanced collaboration capabilities for migrations.
- Better support for Xata migrations.
- Compatibility with CockroachDB (achieving full compatibility will only require removing serial fields from the migration folder).
================================================
FILE: changelogs/drizzle-kit/0.30.1.md
================================================
# New Features
### `drizzle-kit export`
To make drizzle-kit integration with other migration tools, like Atlas, much easier, we've prepared a new command called `export`. It will translate your drizzle schema into SQL representation (DDL) statements and output them to the console
```ts
// schema.ts
import { pgTable, serial, text } from 'drizzle-orm/pg-core'
export const users = pgTable('users', {
id: serial('id').primaryKey(),
email: text('email').notNull(),
name: text('name')
});
```
Running
```bash
npx drizzle-kit export
```
will output this string to console
```bash
CREATE TABLE "users" (
"id" serial PRIMARY KEY NOT NULL,
"email" text NOT NULL,
"name" text
);
```
By default, the only option for now is `--sql`, so the output format will be SQL DDL statements. In the future, we will support additional output formats to accommodate more migration tools
```bash
npx drizzle-kit export --sql
```
================================================
FILE: changelogs/drizzle-kit/0.30.2.md
================================================
- Fix certificates generation utility for Drizzle Studio; [[BUG]: [drizzle-kit]: drizzle-kit dependency on drizzle-studio perms error](https://github.com/drizzle-team/drizzle-orm/issues/3729)
================================================
FILE: changelogs/drizzle-kit/0.30.3.md
================================================
# SingleStore `push` and `generate` improvements
As SingleStore did not support certain DDL statements before this release, you might encounter an error indicating that some schema changes cannot be applied due to a database issue. Starting from this version, drizzle-kit will detect such cases and initiate table recreation with data transfer between the tables
# Bug fixes
- [[BUG] If the index name is the same as the generated name, it will be empty and a type error will occur](https://github.com/drizzle-team/drizzle-orm/issues/3420)
================================================
FILE: changelogs/drizzle-kit/0.30.4.md
================================================
- Fix bug that generates incorrect syntax when introspect in mysql
- Fix a bug that caused incorrect syntax output when introspect in unsigned columns
================================================
FILE: changelogs/drizzle-kit/0.30.5.md
================================================
# New Features
## Added `Gel` dialect support and `gel-js` client support
Drizzle is getting a new `Gel` dialect with its own types and Gel-specific logic. In this first iteration, almost all query-building features have been copied from the `PostgreSQL` dialect since Gel is fully PostgreSQL-compatible. The only change in this iteration is the data types. The Gel dialect has a different set of available data types, and all mappings for these types have been designed to avoid any extra conversions on Drizzle's side. This means you will insert and select exactly the same data as supported by the Gel protocol.
Drizzle + Gel integration will work only through `drizzle-kit pull`. Drizzle won't support `generate`, `migrate`, or `push` features in this case. Instead, drizzle-kit is used solely to pull the Drizzle schema from the Gel database, which can then be used in your `drizzle-orm` queries.
The Gel + Drizzle workflow:
1. Use the `gel` CLI to manage your schema.
2. Use the `gel` CLI to generate and apply migrations to the database.
3. Use drizzle-kit to pull the Gel database schema into a Drizzle schema.
4. Use drizzle-orm with gel-js to query the Gel database.
On the drizzle-kit side you can now use `dialect: "gel"`
```ts
// drizzle.config.ts
import { defineConfig } from 'drizzle-kit';
export default defineConfig({
dialect: 'gel',
});
```
For a complete Get Started tutorial you can use our new guides:
- [Get Started with Drizzle and Gel in a new project](https://orm.drizzle.team/docs/get-started/gel-new)
- [Get Started with Drizzle and Gel in an existing project](https://orm.drizzle.team/docs/get-started/gel-existing)
================================================
FILE: changelogs/drizzle-kit/0.30.6.md
================================================
### Bug fixes
- [[BUG]: d1 push locally is not working](https://github.com/drizzle-team/drizzle-orm/issues/4099) - thanks @mabels and @RomanNabukhotnyi
- [[BUG] Cloudflare D1: drizzle-kit push is not working (error 7500 SQLITE_AUTH)](https://github.com/drizzle-team/drizzle-orm/issues/3728) - thanks @mabels and @RomanNabukhotnyi
================================================
FILE: changelogs/drizzle-kit/0.31.0.md
================================================
## Features and improvements
### Enum DDL improvements
For situations where you drop an `enum` value or reorder values in an `enum`, there is no native way to do this in PostgreSQL. To handle these cases, `drizzle-kit` used to:
- Change the column data types from the enum to text
- Drop the old enum
- Add the new enum
- Change the column data types back to the new enum
However, there were a few scenarios that weren’t covered: `PostgreSQL` wasn’t updating default expressions for columns when their data types changed
Therefore, for cases where you either change a column’s data type from an `enum` to some other type, drop an `enum` value, or reorder `enum` values, we now do the following:
- Change the column data types from the enum to text
- Set the default using the ::text expression
- Drop the old enum
- Add the new enum
- Change the column data types back to the new enum
- Set the default using the :: expression
### `esbuild` version upgrade
For `drizzle-kit` we upgraded the version to latest (`0.25.2`), thanks @paulmarsicloud
## Bug fixes
- [[BUG]: Error on Malformed Array Literal](https://github.com/drizzle-team/drizzle-orm/issues/2715) - thanks @Kratious
- [[BUG]: Postgres drizzle-kit: Error while pulling indexes from a table with json/jsonb deep field index](https://github.com/drizzle-team/drizzle-orm/issues/2744) - thanks @Kratious
- [goog-vulnz flags CVE-2024-24790 in esbuild 0.19.7](https://github.com/drizzle-team/drizzle-orm/issues/4045)
================================================
FILE: changelogs/drizzle-kit/0.31.1.md
================================================
### Fixed `drizzle-kit pull` bugs when using Gel extensions.
Because Gel extensions create schema names containing `::` (for example, `ext::auth`), Drizzle previously handled these names incorrectly. Starting with this release, you can use Gel extensions without any problems. Here’s what you should do:
1. Enable extensions schemas in `drizzle.config.ts`
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: 'gel',
schemaFilter: ['ext::auth', 'public']
});
```
2. Run `drizzle-kit pull`
3. Done!
================================================
FILE: changelogs/drizzle-kit/0.31.10.md
================================================
- Updated to `hanji@0.0.8` - native bun `stringWidth`, `stripANSI` support, errors for non-TTY environments
- We've migrated away from `esbuild-register` to `tsx` loader, it will now allow to use `drizzle-kit` seamlessly with both `ESM` and `CJS` modules
- We've also added native `Bun` and `Deno` launch support, which will not trigger `tsx` loader and utilise native `bun` and `deno` imports capabilities and faster startup times
================================================
FILE: changelogs/drizzle-kit/0.31.2.md
================================================
### Bug fixes
- Fixed relations extraction to not interfere with Drizzle Studio.
================================================
FILE: changelogs/drizzle-kit/0.31.3.md
================================================
- Internal changes to Studio context. Added `databaseName` and `packageName` properties for Studio
================================================
FILE: changelogs/drizzle-kit/0.31.4.md
================================================
- Fixed `halfvec`, `bit` and `sparsevec` type generation bug in drizzle-kit
================================================
FILE: changelogs/drizzle-kit/0.31.5.md
================================================
- Add casing support to studio configuration and related functions
================================================
FILE: changelogs/drizzle-kit/0.31.6.md
================================================
### Bug fixes
- [[BUG]: Importing drizzle-kit/api fails in ESM modules](https://github.com/drizzle-team/drizzle-orm/issues/2853)
================================================
FILE: changelogs/drizzle-kit/0.31.7.md
================================================
### Bug fixes
- [[BUG]: Drizzle Kit push to Postgres 18 produces unecessary DROP SQL when the schema was NOT changed](https://github.com/drizzle-team/drizzle-orm/issues/4944)
================================================
FILE: changelogs/drizzle-kit/0.31.8.md
================================================
### Bug fixes
- Fixed `algorythm` => `algorithm` typo.
- Fixed external dependencies in build configuration.
================================================
FILE: changelogs/drizzle-kit/0.31.9.md
================================================
- drizzle-kit api improvements for D1 connections
================================================
FILE: changelogs/drizzle-orm/0.12.0-beta.23.md
================================================
# drizzle-orm 0.12.0-beta.23
- Added new row mapping mechanism as `mapResultRowV2`, `mapResultRow` will be replaced by it in the future.
================================================
FILE: changelogs/drizzle-orm/0.12.0-beta.24.md
================================================
# drizzle-orm 0.12.0-beta.24
- Made `.execute()` method public in query builders.
- Added `name()` function for escaping entity names inside queries.
- (internal) Removed old row mapper implementation.
================================================
FILE: changelogs/drizzle-orm/0.13.0.md
================================================
# drizzle-orm 0.13.0
- Release 🎉
================================================
FILE: changelogs/drizzle-orm/0.13.1.md
================================================
# drizzle-orm 0.13.1
- Fix mysql peer dependency range
================================================
FILE: changelogs/drizzle-orm/0.14.1.md
================================================
# drizzle-orm 0.14.1
- Bumped everything to 0.14.1.
================================================
FILE: changelogs/drizzle-orm/0.14.2.md
================================================
# drizzle-orm 0.14.2
- Bumped everything to 0.14.2
================================================
FILE: changelogs/drizzle-orm/0.15.0.md
================================================
# drizzle-orm 0.15.0
- Minor upgrade for all modules, due to adding version for api
- Add internal version for ORM api and npm version
================================================
FILE: changelogs/drizzle-orm/0.15.1.md
================================================
# drizzle-orm 0.15.1
- Add schema symbol to table
- Append schema before table name in SQLWrapper if it exists
================================================
FILE: changelogs/drizzle-orm/0.15.2.md
================================================
# drizzle-orm 0.15.2
Internal release
================================================
FILE: changelogs/drizzle-orm/0.15.3.md
================================================
# drizzle-orm 0.15.3
Internal release
================================================
FILE: changelogs/drizzle-orm/0.16.0.md
================================================
# drizzle-orm 0.16.0
- Bump all packages to 0.16.0
================================================
FILE: changelogs/drizzle-orm/0.16.1.md
================================================
# drizzle-orm 0.16.1
- Fix peer dependency error for >=0.16 drizzle packages
================================================
FILE: changelogs/drizzle-orm/0.17.0.md
================================================
## ❗ All ORM packages are now merged into `drizzle-orm`
Starting from release `0.17.0` and onwards, all dialect-specific packages are merged into `drizzle-orm`. Legacy ORM packages will be archived.
### Import paths changes
#### PostgreSQL
- `import { ... } from 'drizzle-orm-pg'` -> `import { ... } from 'drizzle-orm/pg-core'`
- `import { ... } from 'drizzle-orm-pg/node'` -> `import { ... } from 'drizzle-orm/node-postgres'`
- `import { ... } from 'drizzle-orm-pg/neondb'` -> `import { ... } from 'drizzle-orm/neon'`
- `import { ... } from 'drizzle-orm-pg/postgres.js'` -> `import { ... } from 'drizzle-orm/postgres.js'`
#### MySQL
- `import { ... } from 'drizzle-orm-mysql'` -> `import { ... } from 'drizzle-orm/mysql-core'`
- `import { ... } from 'drizzle-orm-mysql/mysql2'` -> `import { ... } from 'drizzle-orm/mysql2'`
#### SQLite
- `import { ... } from 'drizzle-orm-sqlite'` -> `import { ... } from 'drizzle-orm/sqlite-core'`
- `import { ... } from 'drizzle-orm-sqlite/better-sqlite3'` -> `import { ... } from 'drizzle-orm/better-sqlite3'`
- `import { ... } from 'drizzle-orm-sqlite/d1'` -> `import { ... } from 'drizzle-orm/d1'`
- `import { ... } from 'drizzle-orm-sqlite/bun'` -> `import { ... } from 'drizzle-orm/bun-sqlite'`
- `import { ... } from 'drizzle-orm-sqlite/sql.js'` -> `import { ... } from 'drizzle-orm/sql.js'`
================================================
FILE: changelogs/drizzle-orm/0.17.1.md
================================================
- Added feature showcase section to README
================================================
FILE: changelogs/drizzle-orm/0.17.2.md
================================================
- Fixed package.json require path in 'drizzle-orm/version'
================================================
FILE: changelogs/drizzle-orm/0.17.3.md
================================================
We have released [AWS Data API support](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/data-api.html) for PostgreSQL
---
Connection example
```typescript
import { drizzle, migrate } from 'drizzle-orm/aws-data-api/pg';
const rdsClient = new RDSDataClient({});
const db = drizzle(rdsClient, {
database: '',
secretArn: '',
resourceArn: '',
});
await migrate(db, { migrationsFolder: '' });
```
> **Note**:
> All drizzle pg data types are working well with data api, except for `interval`. This type is not yet mapped in a proper way
================================================
FILE: changelogs/drizzle-orm/0.17.4.md
================================================
We have released [SQLite Proxy Driver](https://github.com/drizzle-team/drizzle-orm/tree/main/examples/sqlite-proxy)
---
Perfect way to set up custom logic for database calls instead of predefined drivers
Should work well with serverless apps 🚀
```typescript
// Custom Proxy HTTP driver
const db = drizzle(async (sql, params, method) => {
try {
const rows = await axios.post('http://localhost:3000/query', { sql, params, method });
return { rows: rows.data };
} catch (e: any) {
console.error('Error from sqlite proxy server: ', e.response.data)
return { rows: [] };
}
});
```
> For more examples you can check the [full documentation](https://github.com/drizzle-team/drizzle-orm/tree/main/examples/sqlite-proxy)
================================================
FILE: changelogs/drizzle-orm/0.17.5.md
================================================
We have released [Planetscale Serverless](https://github.com/planetscale/database-js) driver support
---
Usage example:
```typescript
import { drizzle } from 'drizzle-orm/planetscale-serverless';
import { connect } from '@planetscale/database';
// create the connection
const connection = connect({
host: process.env['DATABASE_HOST'],
username: process.env['DATABASE_USERNAME'],
password: process.env['DATABASE_PASSWORD'],
});
const db = drizzle(connection);
```
================================================
FILE: changelogs/drizzle-orm/0.17.6.md
================================================
Fix circular dependency for query building on all pg and mysql drivers
Moved all aws data api typings specific logic to dialect from sql to prevent circular dependency issues
================================================
FILE: changelogs/drizzle-orm/0.17.7.md
================================================
- Fix [#158](https://github.com/drizzle-team/drizzle-orm/issues/158) issue. Method `.returning()` was working incorrectly with `.get()` method in sqlite dialect
- Fix SQLite Proxy driver mapping bug
- Add test cases for SQLite Proxy driver
- Add additional example for SQLite Proxy Server setup to handle `.get()` as well
================================================
FILE: changelogs/drizzle-orm/0.18.0.md
================================================
- Improved join result types for partial selects (refer to the [docs](/docs/joins.md) page for more information)
- Renamed import paths for Postgres.js and SQL.js drivers to avoid bundling errors:
- `drizzle-orm/postgres.js` -> `drizzle-orm/postgres-js`
- `drizzle-orm/sql.js` -> `drizzle-orm/sql-js`
================================================
FILE: changelogs/drizzle-orm/0.19.0.md
================================================
- Implemented selecting and joining a subquery. Example usage:
```ts
const sq = db
.select({
categoryId: courseCategoriesTable.id,
category: courseCategoriesTable.name,
total: sql`count(${courseCategoriesTable.id})`.as(),
})
.from(courseCategoriesTable)
.groupBy(courseCategoriesTable.id, courseCategoriesTable.name)
.subquery('sq');
```
After that, just use the subquery instead of a table as usual.
- ❗ Replaced `db.select(table).fields({ ... })` syntax with `db.select({ ... }).from(table)` to look more like its SQL counterpart.
================================================
FILE: changelogs/drizzle-orm/0.19.1.md
================================================
## Changelog
---
- Add `char` data type support for postgresql by @AlexandrLi in [#177](https://github.com/drizzle-team/drizzle-orm/pull/177)
- Adding new section with `New Contributors` for release notes. Took this template from [bun](https://github.com/oven-sh/bun) release notes pattern
## New Contributors
---
- @AlexandrLi made their first contribution in [#177](https://github.com/drizzle-team/drizzle-orm/pull/177)
================================================
FILE: changelogs/drizzle-orm/0.20.0.md
================================================
- 🎉 **Implemented support for WITH clause ([docs](/drizzle-orm/src/pg-core/README.md#with-clause)). Example usage:**
```ts
const sq = db
.select()
.from(users)
.prepareWithSubquery('sq');
const result = await db
.with(sq)
.select({
id: sq.id,
name: sq.name,
total: sql`count(${sq.id})::int`(),
})
.from(sq)
.groupBy(sq.id, sq.name);
```
- 🐛 Fixed various bugs with selecting/joining of subqueries.
- ❗ Renamed `.subquery('alias')` to `.as('alias')`.
- ❗ ``sql`query`.as()`` is now ``sql`query`()``. Old syntax is still supported, but is deprecated and will be removed in one of the next releases.
================================================
FILE: changelogs/drizzle-orm/0.20.1.md
================================================
- 🎉 Added `{ logger: true }` shorthand to `drizzle()` to enable query logging. See [logging docs](/drizzle-orm/src/pg-core/README.md#logging) for detailed logging configuration.
================================================
FILE: changelogs/drizzle-orm/0.20.2.md
================================================
- 🎉 Added PostgreSQL network data types:
- `inet`
- `cidr`
- `macaddr`
- `macaddr8`
================================================
FILE: changelogs/drizzle-orm/0.20.3.md
================================================
- 🎉 Added support for locking clauses in SELECT (`SELECT ... FOR UPDATE`):
PostgreSQL
```ts
await db
.select()
.from(users)
.for('update')
.for('no key update', { of: users })
.for('no key update', { of: users, skipLocked: true })
.for('share', { of: users, noWait: true });
```
MySQL
```ts
await db.select().from(users).for('update');
await db.select().from(users).for('share', { skipLocked: true });
await db.select().from(users).for('update', { noWait: true });
```
- 🎉🐛 Custom column types now support returning `SQL` from `toDriver()` method in addition to the `driverData` type from generic.
================================================
FILE: changelogs/drizzle-orm/0.21.0.md
================================================
## Drizzle ORM 0.21.0 was released 🎉
- Added support for new migration folder structure and breakpoints feature, described in drizzle-kit release section
- Fix `onUpdateNow()` expression generation for default migration statement
### Support for PostgreSQL array types
---
```ts
export const salEmp = pgTable('sal_emp', {
name: text('name').notNull(),
payByQuarter: integer('pay_by_quarter').array(),
schedule: text('schedule').array().array(),
});
export const tictactoe = pgTable('tictactoe', {
squares: integer('squares').array(3).array(3),
});
```
drizzle kit will generate
```sql
CREATE TABLE sal_emp (
name text,
pay_by_quarter integer[],
schedule text[][]
);
CREATE TABLE tictactoe (
squares integer[3][3]
);
```
### Added composite primary key support to PostgreSQL and MySQL
---
PostgreSQL
```ts
import { primaryKey } from 'drizzle-orm/pg-core';
export const cpkTable = pgTable('table', {
column1: integer('column1').default(10).notNull(),
column2: integer('column2'),
column3: integer('column3'),
}, (table) => ({
cpk: primaryKey(table.column1, table.column2),
}));
```
MySQL
```ts
import { primaryKey } from 'drizzle-orm/mysql-core';
export const cpkTable = mysqlTable('table', {
simple: int('simple'),
columnNotNull: int('column_not_null').notNull(),
columnDefault: int('column_default').default(100),
}, (table) => ({
cpk: primaryKey(table.simple, table.columnDefault),
}));
```
---
## Drizzle Kit 0.17.0 was released 🎉
## Breaking changes
### Folder structure was migrated to newer version
Before running any new migrations, `drizzle-kit` will ask you to upgrade in the first place
Migration file structure < 0.17.0
```plaintext
📦
└ 📂 migrations
└ 📂 20221207174503
├ 📜 migration.sql
├ 📜 snapshot.json
└ 📂 20230101104503
├ 📜 migration.sql
├ 📜 snapshot.json
```
Migration file structure >= 0.17.0
```plaintext
📦
└ 📂 migrations
└ 📂 meta
├ 📜 _journal.json
├ 📜 0000_snapshot.json
├ 📜 0001_snapshot.json
└ 📜 0000_icy_stranger.sql
└ 📜 0001_strange_avengers.sql
```
## Upgrading to 0.17.0
---

To easily migrate from previous folder structure to new you need to run `up` command in drizzle kit. It's a great helper to upgrade your migrations to new format on each drizzle kit major update
```bash
drizzle-kit up: # dialects: `pg`, `mysql`, `sqlite`
# example for pg
drizzle-kit up:pg
```
## New Features
### New `drizzle-kit` command called `drop`
In case you think some of your migrations were generated in a wrong way, or you made a migration simultaneously with other developers, you can easily roll it back by running a simple command
> **Warning**:
> Make sure you are dropping migrations that were not applied to your database
```bash
drizzle-kit drop
```
This command will show you a list of all migrations you have and you'll need just to choose migration you want to drop. After that `drizzle-kit` will do all the hard work on deleting migration files

### New `drizzle-kit` option `--breakpoints` for `generate` and `introspect` commands
If a particular driver doesn't support running multiple queries in 1 execution you can use `--breakpoints`.
`drizzle-kit` will generate current sql
```sql
CREATE TABLE `users` (
`id` int PRIMARY KEY NOT NULL,
`full_name` text NOT NULL,
);
--> statement-breakpoint
CREATE TABLE `table` (
`id` int PRIMARY KEY NOT NULL,
`phone` int,
);
```
Using it, `drizzle-orm` will split all sql files by statements and execute them separately
### Add `drizzle-kit introspect` for MySQL dialect
You can introspect your mysql database using `introspect:mysql` command
```bash
drizzle-kit introspect:mysql --out ./migrations --connectionString mysql://user:password@127.0.0.1:3306/database
```

### Support for glob patterns for schema path
Usage example in `cli`
```bash
drizzle-kit generate:pg --out ./migrations --schema ./core/**/*.ts ./database/schema.ts
```
Usage example in `drizzle.config`
```text
{
"out: "./migrations",
"schema": ["./core/**/*.ts", "./database/schema.ts"]
}
```
## Bug Fixes and improvements
### Postgres dialect
---
**GitHub issue fixes**
- [pg] char is undefined during introspection [#9](https://github.com/drizzle-team/drizzle-kit-mirror/issues/9)
- when unknown type is detected, would be nice to emit a TODO comment instead of undefined [#8](https://github.com/drizzle-team/drizzle-kit-mirror/issues/8)
- "post_id" integer DEFAULT currval('posts_id_seq'::regclass) generates invalid TS [#7](https://github.com/drizzle-team/drizzle-kit-mirror/issues/7)
- "ip" INET NOT NULL is not supported [#6](https://github.com/drizzle-team/drizzle-kit-mirror/issues/6)
- "id" UUID NOT NULL DEFAULT uuid_generate_v4() type is not supported [#5](https://github.com/drizzle-team/drizzle-kit-mirror/issues/5)
- array fields end up as "undefined" in the schema [#4](https://github.com/drizzle-team/drizzle-kit-mirror/issues/4)
- timestamp is not in the import statement in schema.ts [#3](https://github.com/drizzle-team/drizzle-kit-mirror/issues/3)
- generated enums are not camel cased [#2](https://github.com/drizzle-team/drizzle-kit-mirror/issues/2)
**Introspect improvements**
- Add support for composite PK's generation;
- Add support for `cidr`, `inet`, `macaddr`, `macaddr8`, `smallserial`
- Add interval fields generation in schema, such as `minute to second`, `day to hour`, etc.
- Add default values for `numerics`
- Add default values for `enums`
### MySQL dialect
---
**Migration generation improvements**
- Add `autoincrement` create, delete and update handling
- Add `on update current_timestamp` handling for timestamps
- Add data type changing, using `modify`
- Add `not null` changing, using `modify`
- Add `default` drop and create statements
- Fix `defaults` generation bugs, such as escaping, date strings, expressions, etc
**Introspect improvements**
- Add `autoincrement` to all supported types
- Add `fsp` for time based data types
- Add precision and scale for `double`
- Make time `{ mode: "string" }` by default
- Add defaults to `json`, `decimal` and `binary` datatypes
- Add `enum` data type generation
================================================
FILE: changelogs/drizzle-orm/0.21.1.md
================================================
- 🎉 Added support for `HAVING` clause
- 🎉 Added support for referencing selected fields in `.where()`, `.having()`, `.groupBy()` and `.orderBy()` using an optional callback:
```ts
await db
.select({
id: citiesTable.id,
name: sql`upper(${citiesTable.name})`.as('upper_name'),
usersCount: sql`count(${users2Table.id})::int`.as('users_count'),
})
.from(citiesTable)
.leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id))
.where(({ name }) => sql`length(${name}) >= 3`)
.groupBy(citiesTable.id)
.having(({ usersCount }) => sql`${usersCount} > 0`)
.orderBy(({ name }) => name);
```
================================================
FILE: changelogs/drizzle-orm/0.22.0.md
================================================
- 🎉 Introduced a standalone query builder that can be used without a DB connection:
```ts
import { queryBuilder as qb } from 'drizzle-orm/pg-core';
const query = qb.select().from(users).where(eq(users.name, 'Dan'));
const { sql, params } = query.toSQL();
```
- 🎉 Improved `WITH ... SELECT` subquery creation syntax to more resemble SQL:
**Before**:
```ts
const regionalSales = db
.select({
region: orders.region,
totalSales: sql`sum(${orders.amount})`.as('total_sales'),
})
.from(orders)
.groupBy(orders.region)
.prepareWithSubquery('regional_sales');
await db.with(regionalSales).select(...).from(...);
```
**After**:
```ts
const regionalSales = db
.$with('regional_sales')
.as(
db
.select({
region: orders.region,
totalSales: sql`sum(${orders.amount})`.as('total_sales'),
})
.from(orders)
.groupBy(orders.region),
);
await db.with(regionalSales).select(...).from(...);
```
================================================
FILE: changelogs/drizzle-orm/0.23.0.md
================================================
- 🎉 Added Knex and Kysely adapters! They allow you to manage the schemas and migrations with Drizzle and query the data with your favorite query builder. See documentation for more details:
- [Knex adapter](https://github.com/drizzle-team/drizzle-knex)
- [Kysely adapter](https://github.com/drizzle-team/drizzle-kysely)
- 🎉 Added "type maps" to all entities. You can access them via the special `_` property. For example:
```ts
const users = mysqlTable('users', {
id: int('id').primaryKey(),
name: text('name').notNull(),
});
type UserFields = typeof users['_']['columns'];
type InsertUser = typeof users['_']['model']['insert'];
```
Full documentation on the type maps is coming soon.
- 🎉 Added `.$type()` method to all column builders to allow overriding the data type. It also replaces the optional generics on columns.
```ts
// Before
const test = mysqlTable('test', {
jsonField: json('json_field'),
});
// After
const test = mysqlTable('test', {
jsonField: json('json_field').$type(),
});
```
- ❗ Changed syntax for text-based enum columns:
```ts
// Before
const test = mysqlTable('test', {
role: text<'admin' | 'user'>('role'),
});
// After
const test = mysqlTable('test', {
role: text('role', { enum: ['admin', 'user'] }),
});
```
- 🎉 Allowed passing an array of values into `.insert().values()` directly without spreading:
```ts
const users = mysqlTable('users', {
id: int('id').primaryKey(),
name: text('name').notNull(),
});
await users.insert().values([
{ name: 'John' },
{ name: 'Jane' },
]);
```
The spread syntax is now deprecated and will be removed in one of the next releases.
- 🎉 Added "table creators" to allow for table name customization:
```ts
import { mysqlTableCreator } from 'drizzle-orm/mysql-core';
const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`);
const users = mysqlTable('users', {
id: int('id').primaryKey(),
name: text('name').notNull(),
});
// Users table is a normal table, but its name is `myprefix_users` in runtime
```
- 🎉 Implemented support for selecting/joining raw SQL expressions:
```ts
// select current_date + s.a as dates from generate_series(0,14,7) as s(a);
const result = await db
.select({
dates: sql`current_date + s.a`,
})
.from(sql`generate_series(0,14,7) as s(a)`);
```
- 🐛 Fixed a lot of bugs from user feedback on GitHub and Discord (thank you! ❤). Fixes #293 #301 #276 #269 #253 #311 #312
================================================
FILE: changelogs/drizzle-orm/0.23.1.md
================================================
- 🐛 Re-export `InferModel` from `drizzle-orm`
================================================
FILE: changelogs/drizzle-orm/0.23.10.md
================================================
- 🐛 Add missing config argument to transactions API
- 🐛 Fix Postgres and MySQL schema declaration (#427)
================================================
FILE: changelogs/drizzle-orm/0.23.11.md
================================================
- 🐛 Fix migrator function for PostgreSQL
> Would suggest to upgrade to this version anyone who is using postgres dialect. `0.23.9` and `0.23.10` are broken for postgresql migrations
================================================
FILE: changelogs/drizzle-orm/0.23.12.md
================================================
- 🐛 Fixed multi-level join results (e.g. joining a subquery with a nested join)
================================================
FILE: changelogs/drizzle-orm/0.23.13.md
================================================
- 🎉 All enum and text enum columns now have a properly typed `enumValues` property
================================================
FILE: changelogs/drizzle-orm/0.23.2.md
================================================
- 🐛 Rolled back some breaking changes for drizzle-kit
================================================
FILE: changelogs/drizzle-orm/0.23.3.md
================================================
- 🎉 Added [libSQL](https://libsql.org/) support
================================================
FILE: changelogs/drizzle-orm/0.23.4.md
================================================
- 🐛 Fixed broken types in Kysely and Knex adapters
================================================
FILE: changelogs/drizzle-orm/0.23.5.md
================================================
- 🐛 Various minor bugfixes
================================================
FILE: changelogs/drizzle-orm/0.23.6.md
================================================
- 🐛 Fixed referencing the selected aliased field in the same query
- 🐛 Fixed decimal column data type in MySQL
- 🐛 Fixed mode autocompletion for integer column in SQLite
- 🐛 Fixed extra parentheses in the generated SQL for the `IN` operator (#382)
- 🐛 Fixed regression in `pgEnum.enumValues` type (#358)
- 🎉 Allowed readonly arrays to be passed to `pgEnum`
================================================
FILE: changelogs/drizzle-orm/0.23.7.md
================================================
- 🎉 Added `INSERT IGNORE` support for MySQL (#305)
================================================
FILE: changelogs/drizzle-orm/0.23.8.md
================================================
- 🎉 Fixed dates timezone differences for timestamps in Postgres and MySQL (contributed by @AppelBoomHD via #288)
================================================
FILE: changelogs/drizzle-orm/0.23.9.md
================================================
# Transactions support 🎉
You can now use transactions with all the supported databases and drivers.
`node-postgres` example:
```ts
await db.transaction(async (tx) => {
await tx.insert(users).values(newUser);
await tx.update(users).set({ name: 'Mr. Dan' }).where(eq(users.name, 'Dan'));
await tx.delete(users).where(eq(users.name, 'Dan'));
});
```
For more information, see transactions docs:
- [PostgreSQL](/drizzle-orm/src/pg-core/README.md#transactions)
- [MySQL](/drizzle-orm/src/mysql-core/README.md#transactions)
- [SQLite](/drizzle-orm/src/sqlite-core/README.md#transactions)
================================================
FILE: changelogs/drizzle-orm/0.24.0.md
================================================
- 🎉 Added iterator support to `mysql2` (sponsored by @rizen ❤)
- ❗ `.prepare()` in MySQL no longer requires a name argument
================================================
FILE: changelogs/drizzle-orm/0.24.1.md
================================================
### Bugs
🐛 Fix onConflict targets in [#475](https://github.com/drizzle-team/drizzle-orm/pull/475) - thanks @wkunert ❤️
### Documentation
> Thanks to @tmcw we have started our way to get JSDoc documentation
📄 JSDoc for conditions in [#467](https://github.com/drizzle-team/drizzle-orm/pull/467) - thanks @tmcw ❤️
================================================
FILE: changelogs/drizzle-orm/0.24.2.md
================================================
- 🐛 Pool connections opened for transactions are now closed after the transaction is committed or rolled back
================================================
FILE: changelogs/drizzle-orm/0.24.3.md
================================================
- 🐛 Fixed query generation when selecting from alias
================================================
FILE: changelogs/drizzle-orm/0.24.4.md
================================================
- 🐛 Added verbose error when .values() is called without values (#441)
- 🐛 Fixed nested PG arrays mapping (#460)
- ❗ Removed spread syntax in .values() (#269)
- 🐛 Fixed passing undefined as field value to insert/update (#375)
================================================
FILE: changelogs/drizzle-orm/0.24.5.md
================================================
- Add possibility to have placeholders in `.limit()` and `.offset()`
```ts
const stmt = db
.select({
id: usersTable.id,
name: usersTable.name,
})
.from(usersTable)
.limit(placeholder('limit'))
.offset(placeholder('offset'))
.prepare('stmt');
const result = await stmt.execute({ limit: 1, offset: 1 });
```
================================================
FILE: changelogs/drizzle-orm/0.25.0.md
================================================
# ESM support
- 🎉 Added ESM support! You can now use `drizzle-orm` in both ESM and CJS environments.
- 🎉 Added code minification and source maps.
- ❗ Removed several nested import paths. Most notably, everything from `drizzle-orm/sql` and `drizzle-orm/expressions` should now be imported from `drizzle-orm` instead.
================================================
FILE: changelogs/drizzle-orm/0.25.1.md
================================================
- 🐛 Fix package.json `exports` field
================================================
FILE: changelogs/drizzle-orm/0.25.2.md
================================================
- 🎉 Documentation improvements (#495, #507)
- 🎉 Added `"sideEffects": false` to package.json (#515)
- 🐛 Fixed AWS Data API driver migrations (#510)
================================================
FILE: changelogs/drizzle-orm/0.25.3.md
================================================
- 🐛 Fix `pg` imports in ESM mode (#505)
- 🐛 Add "types" and "default" fields to "exports" entries in package.json (#511)
================================================
FILE: changelogs/drizzle-orm/0.25.4.md
================================================
- 🎉 Added support for [Vercel Postgres](https://vercel.com/docs/storage/vercel-postgres/quickstart)
```typescript
import { drizzle } from 'drizzle-orm/vercel-postgres';
import { sql } from "@vercel/postgres";
const db = drizzle(sql);
db.select(...)
```
================================================
FILE: changelogs/drizzle-orm/0.26.0.md
================================================
# Drizzle ORM 0.26.0 is here 🎉
## README docs are fully transferred to web
The documentation has been completely reworked and updated with additional examples and explanations. You can find it here: https://orm.drizzle.team.
Furthermore, the entire documentation has been made open source, allowing you to edit and add any information you deem important for the community.
Visit https://github.com/drizzle-team/drizzle-orm-docs to access the open-sourced documentation.
Additionally, you can create specific documentation issues in this repository
## New Features
Introducing our first helper built on top of Drizzle Core API syntax: **the Relational Queries!** 🎉
With Drizzle RQ you can do:
1. Any amount of relations that will be mapped for you
2. Including or excluding! specific columns. You can also combine these options
3. Harness the flexibility of the `where` statements, allowing you to define custom conditions beyond the predefined ones available in the Drizzle Core API.
4. Expand the functionality by incorporating additional extras columns using SQL templates. For more examples, refer to the documentation.
Most importantly, regardless of the size of your query, Drizzle will always generate a **SINGLE optimized query**.
This efficiency extends to the usage of **Prepared Statements**, which are fully supported within the Relational Query Builder.
For more info: [Prepared Statements in Relational Query Builder](https://orm.drizzle.team/rqb#prepared-statements)
**Example of setting one-to-many relations**
> As you can observe, `relations` are a distinct concept that coexists alongside the main Drizzle schema. You have the flexibility to opt-in or opt-out of them at any time without affecting the `drizzle-kit` migrations or the logic for Core API's types and runtime.
```ts
import { integer, serial, text, pgTable } from 'drizzle-orm/pg-core';
import { relations } from 'drizzle-orm';
export const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
});
export const usersConfig = relations(users, ({ many }) => ({
posts: many(posts),
}));
export const posts = pgTable('posts', {
id: serial('id').primaryKey(),
content: text('content').notNull(),
authorId: integer('author_id').notNull(),
});
export const postsConfig = relations(posts, ({ one }) => ({
author: one(users, { fields: [posts.authorId], references: [users.id] }),
}));
```
**Example of querying your database**
Step 1: Provide all tables and relations to `drizzle` function
> `drizzle` import depends on the database driver you're using
```ts
import * as schema from './schema';
import { drizzle } from 'drizzle-orm/...';
const db = drizzle(client, { schema });
await db.query.users.findMany(...);
```
If you have schema in multiple files
```ts
import * as schema1 from './schema1';
import * as schema2 from './schema2';
import { drizzle } from 'drizzle-orm/...';
const db = drizzle(client, { schema: { ...schema1, ...schema2 } });
await db.query.users.findMany(...);
```
Step 2: Query your database with Relational Query Builder
**Select all users**
```ts
const users = await db.query.users.findMany();
```
**Select the first user**
> `.findFirst()` will add limit 1 to the query
```ts
const user = await db.query.users.findFirst();
```
**Select all posts**
Get all posts with just `id`, `content` and include `comments`
```ts
const posts = await db.query.posts.findMany({
columns: {
id: true,
content: true,
},
with: {
comments: true,
}
});
```
**Select all posts excluding `content` column**
```ts
const posts = await db.query.posts.findMany({
columns: {
content: false,
},
});
```
For more examples you can check [full docs](https://orm.drizzle.team/rqb) for Relational Queries
## Bug fixes
- 🐛 Fixed partial joins with prefixed tables (#542)
## Drizzle Kit updates
### New ways to define drizzle config file
You can now specify the configuration not only in the `.json` format but also in `.ts` and `.js` formats.
**TypeScript example**
```ts
import { Config } from "drizzle-kit";
export default {
schema: "",
connectionString: process.env.DB_URL,
out: "",
breakpoints: true
} satisfies Config;
```
**JavaScript example**
```js
/** @type { import("drizzle-kit").Config } */
export default {
schema: "",
connectionString: "",
out: "",
breakpoints: true
};
```
## New commands 🎉
### `drizzle-kit push:mysql`
You can now push your MySQL schema directly to the database without the need to create and manage migration files. This feature proves to be particularly useful for rapid local development and when working with PlanetScale databases.
By pushing the MySQL schema directly to the database, you can streamline the development process and avoid the overhead of managing migration files. This allows for more efficient iteration and quick deployment of schema changes during local development.
### How to setup your codebase for drizzle-kit push feature?
1. For this feature, you need to create a `drizzle.config.[ts|js|json]` file. We recommend using `.ts` or `.js` files as they allow you to easily provide the database connection information as secret variables
You'll need to specify `schema` and `connectionString`(or `db`, `port`, `host`, `password`, etc.) to make `drizzle-kit push:mysql` work
`drizzle.config.ts` example
```ts copy
import { Config } from "src";
export default {
schema: "./schema.ts",
connectionString: process.env.DB_URL,
} satisfies Config;
```
2. Run `drizzle-kit push:mysql`
3. If Drizzle detects any potential `data-loss` issues during a migration, it will prompt you to approve whether the data should be truncated or not in order to ensure a successful migration
4. Approve or reject the action that Drizzle needs to perform in order to push your schema changes to the database.
5. Done ✅
================================================
FILE: changelogs/drizzle-orm/0.26.1.md
================================================
- 🐛 Fixed including multiple relations on the same level in RQB (#599)
- 🐛 Updated migrators for relational queries support (#601)
- 🐛 Fixed invoking .findMany() without arguments
================================================
FILE: changelogs/drizzle-orm/0.26.2.md
================================================
- 🐛 Fixed upsert targeting composite keys for SQLite (#521)
- 🐛 AWS Data API+Postgres: fixed adding of typings when merging queries (#517)
- 🐛 Fixed "on conflict" with "where" clause for Postgres (#651)
- 🐛 Various GitHub docs community fixes and improvements ♥ (#547, #548, #587, #606, #609, #625)
- **Experimental**: added OpenTelemetry support for Postgres
================================================
FILE: changelogs/drizzle-orm/0.26.3.md
================================================
- Disabled OTEL integration due to the top-level await issues
================================================
FILE: changelogs/drizzle-orm/0.26.4.md
================================================
- 🐛 Fixed AWS Data API mapping in relational queries (#677, #681)
- 🐛 Allowed using named self-relations (#678)
- 🐛 Fixed querying relations with composite FKs (#683)
================================================
FILE: changelogs/drizzle-orm/0.26.5.md
================================================
- 🎉 Added bigint mode to SQLite (#558)
================================================
FILE: changelogs/drizzle-orm/0.27.0.md
================================================
## Correct behavior when installed in a monorepo (multiple Drizzle instances)
Replacing all `instanceof` statements with a custom `is()` function allowed us to handle multiple Drizzle packages interacting properly.
**It also fixes one of our biggest Discord tickets: `maximum call stack exceeded` 🎉**
You should now use `is()` instead of `instanceof` to check if specific objects are instances of specific Drizzle types. It might be useful if you are building something on top of the Drizzle API.
```ts
import { is, Column } from 'drizzle-orm'
if (is(value, Column)) {
// value's type is narrowed to Column
}
```
## `distinct` clause support
```ts
await db.selectDistinct().from(usersDistinctTable).orderBy(
usersDistinctTable.id,
usersDistinctTable.name,
);
```
Also, `distinct on` clause is available for PostgreSQL:
```ts
await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy(
usersDistinctTable.id,
);
await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from(
usersDistinctTable,
).orderBy(usersDistinctTable.name);
```
## `bigint` and `boolean` support for SQLite
Contributed by @MrRahulRamkumar (#558), @raducristianpopa (#411) and @meech-ward (#725)
```ts
const users = sqliteTable('users', {
bigintCol: blob('bigint', { mode: 'bigint' }).notNull(),
boolCol: integer('bool', { mode: 'boolean' }).notNull(),
});
```
## DX improvements
- Added verbose type error when relational queries are used on a database type without a schema generic
- Fix `where` callback in RQB for tables without relations
## Various docs improvements
- Fix joins docs typo (#522) by @arjunyel
- Add Supabase guide to readme (#690) by @saltcod
- Make the column type in sqlite clearer (#717) by @shairez
================================================
FILE: changelogs/drizzle-orm/0.27.1.md
================================================
- 🎉 Added support for [Neon HTTP driver](https://neon.tech/docs/serverless/serverless-driver)
```typescript
import { neon, neonConfig } from '@neondatabase/serverless';
import { drizzle } from 'drizzle-orm/neon-http';
neonConfig.fetchConnectionCache = true;
const sql = neon(process.env.DRIZZLE_DATABASE_URL!);
const db = drizzle(sql);
db.select(...)
```
================================================
FILE: changelogs/drizzle-orm/0.27.2.md
================================================
## 🎉 Added support for `UNIQUE` constraints in PostgreSQL, MySQL, SQLite
For PostgreSQL, unique constraints can be defined at the column level for single-column constraints, and in the third parameter for multi-column constraints. In both cases, it will be possible to define a custom name for the constraint. Additionally, PostgreSQL will receive the `NULLS NOT DISTINCT` option to restrict having more than one NULL value in a table. [Reference](https://www.postgresql.org/docs/current/ddl-constraints.html#DDL-CONSTRAINTS-UNIQUE-CONSTRAINTS)
The examples below just show different `unique` usages. Please don't look for a real-world use case in those tables
```ts
// single column
const table = pgTable('table', {
id: serial('id').primaryKey(),
name: text('name').notNull().unique(),
state: char('state', { length: 2 }).unique('custom'),
field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }),
});
// multiple columns
const table = pgTable('table', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
state: char('state', { length: 2 }),
}, (t) => ({
first: unique('custom_name').on(t.name, t.state).nullsNotDistinct(),
second: unique('custom_name1').on(t.name, t.state),
}));
```
For MySQL, everything will be the same except for the `NULLS NOT DISTINCT` option. It appears that MySQL does not support it
The examples below just show different `unique` usages. Please don't look for a real-world use case in those tables
```ts
// single column
const table = mysqlTable('table', {
id: serial('id').primaryKey(),
name: text('name').notNull().unique(),
state: text('state').unique('custom'),
field: text('field').unique('custom_field'),
});
// multiple columns
const table = mysqlTable('cities1', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
state: text('state'),
}, (t) => ({
first: unique().on(t.name, t.state),
second: unique('custom_name1').on(t.name, t.state),
}));
```
In SQLite unique constraints are the same as unique indexes. As long as you can specify a name for the unique index in SQLite - we will treat all unique constraints as unique indexes in internal implementation
```ts
// single column
const table = sqliteTable('table', {
id: int('id').primaryKey(),
name: text('name').notNull().unique(),
state: text('state').unique('custom'),
field: text('field').unique(),
});
// multiple columns
const table = sqliteTable('table', {
id: int('id').primaryKey(),
name: text('name').notNull(),
state: text('state'),
}, (t) => ({
first: unique().on(t.name, t.state),
second: unique('custom').on(t.name, t.state),
}));
```
================================================
FILE: changelogs/drizzle-orm/0.28.0.md
================================================
## Breaking changes
### Removed support for filtering by nested relations
Current example won't work in `0.28.0`:
```ts
const usersWithPosts = await db.query.users.findMany({
where: (table, { sql }) => (sql`json_array_length(${table.posts}) > 0`),
with: {
posts: true,
},
});
```
The `table` object in the `where` callback won't have fields from `with` and `extras`. We removed them to be able to build more efficient relational queries, which improved row reads and performance.
If you have used those fields in the `where` callback before, there are several workarounds:
1. Applying those filters manually on the code level after the rows are fetched;
2. Using the core API.
### Added Relational Queries `mode` config for `mysql2` driver
Drizzle relational queries always generate exactly one SQL statement to run on the database and it has certain caveats. To have best in class support for every database out there we've introduced modes.
Drizzle relational queries use lateral joins of subqueries under the hood and for now PlanetScale does not support them.
When using `mysql2` driver with regular MySQL database - you should specify mode: "default".
When using `mysql2` driver with PlanetScale - you need to specify mode: "planetscale".
```ts
import { drizzle } from 'drizzle-orm/mysql2';
import mysql from 'mysql2/promise';
import * as schema from './schema';
const connection = await mysql.createConnection({
uri: process.env.PLANETSCALE_DATABASE_URL,
});
const db = drizzle(connection, { schema, mode: 'planetscale' });
```
## Improved IntelliSense performance for large schemas
We've run the diagnostics on a database schema with 85 tables, 666 columns, 26 enums, 172 indexes and 133 foreign keys. We've optimized internal types which resulted in **430%** speed up in IntelliSense.
## Improved Relational Queries Performance and Read Usage
In this release we've fully changed the way queries are generated for the Relational Queries API.
As a summary, we've made the following set of changes to the query generation strategy:
1. Lateral Joins: In the new version we're utilizing lateral joins, denoted by the "LEFT JOIN LATERAL" clauses, to retrieve specific data from related tables efficiently. For MySQL in PlanetScale and SQLite, we've used simple subquery selects, which improved the query plan and overall performance
2. Selective Data Retrieval: In the new version we're retrieving only the necessary data from tables. This targeted data retrieval reduces the amount of unnecessary information fetched, resulting in a smaller dataset to process and faster execution.
3. Reduced Aggregations: In the new version we've reduced the number of aggregation functions (e.g., COUNT, json_agg). By using json_build_array directly within the lateral joins, drizzle is aggregating the data in a more streamlined manner, leading to improved query performance.
4. Simplified Grouping: In the new version the GROUP BY clause is removed, as the lateral joins and subqueries already handle data aggregation more efficiently.
For this drizzle query
```ts
const items = await db.query.comments.findMany({
limit,
orderBy: comments.id,
with: {
user: {
columns: { name: true },
},
post: {
columns: { title: true },
with: {
user: {
columns: { name: true },
},
},
},
},
});
```
Query that Drizzle generates now
```sql
select "comments"."id",
"comments"."user_id",
"comments"."post_id",
"comments"."content",
"comments_user"."data" as "user",
"comments_post"."data" as "post"
from "comments"
left join lateral (select json_build_array("comments_user"."name") as "data"
from (select *
from "users" "comments_user"
where "comments_user"."id" = "comments"."user_id"
limit 1) "comments_user") "comments_user" on true
left join lateral (select json_build_array("comments_post"."title", "comments_post_user"."data") as "data"
from (select *
from "posts" "comments_post"
where "comments_post"."id" = "comments"."post_id"
limit 1) "comments_post"
left join lateral (select json_build_array("comments_post_user"."name") as "data"
from (select *
from "users" "comments_post_user"
where "comments_post_user"."id" = "comments_post"."user_id"
limit 1) "comments_post_user") "comments_post_user"
on true) "comments_post" on true
order by "comments"."id"
limit 1
```
Query generated before:
```sql
SELECT "id",
"user_id",
"post_id",
"content",
"user"::JSON,
"post"::JSON
FROM
(SELECT "comments".*,
CASE
WHEN count("comments_post"."id") = 0 THEN '[]'
ELSE json_agg(json_build_array("comments_post"."title", "comments_post"."user"::JSON))::text
END AS "post"
FROM
(SELECT "comments".*,
CASE
WHEN count("comments_user"."id") = 0 THEN '[]'
ELSE json_agg(json_build_array("comments_user"."name"))::text
END AS "user"
FROM "comments"
LEFT JOIN
(SELECT "comments_user".*
FROM "users" "comments_user") "comments_user" ON "comments"."user_id" = "comments_user"."id"
GROUP BY "comments"."id",
"comments"."user_id",
"comments"."post_id",
"comments"."content") "comments"
LEFT JOIN
(SELECT "comments_post".*
FROM
(SELECT "comments_post".*,
CASE
WHEN count("comments_post_user"."id") = 0 THEN '[]'
ELSE json_agg(json_build_array("comments_post_user"."name"))
END AS "user"
FROM "posts" "comments_post"
LEFT JOIN
(SELECT "comments_post_user".*
FROM "users" "comments_post_user") "comments_post_user" ON "comments_post"."user_id" = "comments_post_user"."id"
GROUP BY "comments_post"."id") "comments_post") "comments_post" ON "comments"."post_id" = "comments_post"."id"
GROUP BY "comments"."id",
"comments"."user_id",
"comments"."post_id",
"comments"."content",
"comments"."user") "comments"
LIMIT 1
```
## Possibility to insert rows with default values for all columns
You can now provide an empty object or an array of empty objects, and Drizzle will insert all defaults into the database.
```ts
// Insert 1 row with all defaults
await db.insert(usersTable).values({});
// Insert 2 rows with all defaults
await db.insert(usersTable).values([{}, {}]);
```
================================================
FILE: changelogs/drizzle-orm/0.28.1.md
================================================
- 🐛 Fixed Postgres array-related issues introduced by 0.28.0 (#983, #992)
================================================
FILE: changelogs/drizzle-orm/0.28.2.md
================================================
## The community contributions release 🎉
### Internal Features and Changes
1. Added a set of tests for d1. Thanks to @AdiRishi!
2. Fixed issues in internal documentation. Thanks to @balazsorban44 and @pyk!
### Bug Fixes
1. Resolved the issue of truncating timestamp milliseconds for MySQL. Thanks to @steviec!
2. Corrected the type of the get() method for sqlite-based dialects. Issue #565 has been closed. Thanks to @stefanmaric!
3. Rectified the sqlite-proxy bug that caused the query to execute twice. Thanks to @mosch!
### New packages 🎉
Added a support for [Typebox](https://github.com/sinclairzx81/typebox) in [drizzle-typebox](https://orm.drizzle.team/docs/typebox) package. Thanks to @Bulbang!
Please check documentation page for more usage examples: https://orm.drizzle.team/docs/typebox
================================================
FILE: changelogs/drizzle-orm/0.28.3.md
================================================
- 🎉 Added SQLite simplified query API
- 🎉 Added `.$defaultFn()` / `.$default()` methods to column builders
You can specify any logic and any implementation for a function like `cuid()` for runtime defaults. Drizzle won't limit you in the number of implementations you can add.
> Note: This value does not affect the `drizzle-kit` behavior, it is only used at runtime in `drizzle-orm`
```ts
import { varchar, mysqlTable } from "drizzle-orm/mysql-core";
import { createId } from '@paralleldrive/cuid2';
const table = mysqlTable('table', {
id: varchar('id', { length: 128 }).$defaultFn(() => createId()),
});
```
- 🎉 Added `table.$inferSelect` / `table._.inferSelect` and `table.$inferInsert` / `table._.inferInsert` for more convenient table model type inference
- 🛠 Deprecated `InferModel` type in favor of more explicit `InferSelectModel` and `InferInsertModel`
```ts
import { InferSelectModel, InferInsertModel } from 'drizzle-orm'
const usersTable = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
verified: boolean('verified').notNull().default(false),
jsonb: jsonb('jsonb').$type(),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});
type SelectUser = typeof usersTable.$inferSelect;
type InsertUser = typeof usersTable.$inferInsert;
type SelectUser2 = InferSelectModel;
type InsertUser2 = InferInsertModel;
```
- 🛠 Disabled `.d.ts` files bundling
- 🐛 Fixed sqlite-proxy and SQL.js response from `.get()` when the result is empty
================================================
FILE: changelogs/drizzle-orm/0.28.4.md
================================================
- 🐛 Fixed imports in ESM-based projects (#1088)
- 🐛 Fixed type error on Postgres table definitions (#1089)
================================================
FILE: changelogs/drizzle-orm/0.28.5.md
================================================
- 🐛 Fixed incorrect OpenTelemetry type import that caused a runtime error
================================================
FILE: changelogs/drizzle-orm/0.28.6.md
================================================
## Changes
> **Note**:
> MySQL `datetime` with `mode: 'date'` will now store dates in UTC strings and retrieve data in UTC as well to align with MySQL behavior for `datetime`. If you need a different behavior and want to handle `datetime` mapping in a different way, please use `mode: 'string'` or [Custom Types](https://orm.drizzle.team/docs/custom-types) implementation
Check [Fix Datetime mapping for MySQL](https://github.com/drizzle-team/drizzle-orm/pull/1082) for implementation details
## New Features
### 🎉 `LibSQL` batch api support
Reference: https://docs.turso.tech/reference/client-access/javascript-typescript-sdk#execute-a-batch-of-statements
Batch API usage example:
```ts
const batchResponse = await db.batch([
db.insert(usersTable).values({ id: 1, name: 'John' }).returning({
id: usersTable.id,
}),
db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)),
db.query.usersTable.findMany({}),
db.select().from(usersTable).where(eq(usersTable.id, 1)),
db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(
usersTable,
),
]);
```
Type for `batchResponse` in this example would be:
```ts
type BatchResponse = [
{
id: number;
}[],
ResultSet,
{
id: number;
name: string;
verified: number;
invitedBy: number | null;
}[],
{
id: number;
name: string;
verified: number;
invitedBy: number | null;
}[],
{
id: number;
invitedBy: number | null;
}[],
];
```
All possible builders that can be used inside `db.batch`:
```ts
`db.all()`,
`db.get()`,
`db.values()`,
`db.run()`,
`db.query.<table>.findMany()`,
`db.query.<table>.findFirst()`,
`db.select()...`,
`db.update()...`,
`db.delete()...`,
`db.insert()...`,
```
More usage examples here: [integration-tests/tests/libsql-batch.test.ts](https://github.com/drizzle-team/drizzle-orm/pull/1161/files#diff-17253895532e520545027dd48dcdbac2d69a5a49d594974e6d55d7502f89b838R248) and in [docs](https://orm.drizzle.team/docs/batch-api)
### 🎉 Add json mode for text in SQLite
Example
```ts
const test = sqliteTable('test', {
dataTyped: text('data_typed', { mode: 'json' }).$type<{ a: 1 }>().notNull(),
});
```
### 🎉 Add `.toSQL()` to Relational Query API calls
Example
```ts
const query = db.query.usersTable.findFirst().toSQL();
```
### 🎉 Added new PostgreSQL operators for Arrays - thanks @L-Mario564
List of operators and usage examples
`arrayContains`, `arrayContained`, `arrayOverlaps`
```ts
const contains = await db.select({ id: posts.id }).from(posts)
.where(arrayContains(posts.tags, ['Typescript', 'ORM']));
const contained = await db.select({ id: posts.id }).from(posts)
.where(arrayContained(posts.tags, ['Typescript', 'ORM']));
const overlaps = await db.select({ id: posts.id }).from(posts)
.where(arrayOverlaps(posts.tags, ['Typescript', 'ORM']));
const withSubQuery = await db.select({ id: posts.id }).from(posts)
.where(arrayContains(
posts.tags,
db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)),
));
```
### 🎉 Add more SQL operators for where filter function in Relational Queries - thanks @cayter!
**Before**
```ts
import { inArray } from "drizzle-orm/pg-core";
await db.users.findFirst({
where: (table, _) => inArray(table.id, [ ... ])
})
```
**After**
```ts
await db.users.findFirst({
where: (table, { inArray }) => inArray(table.id, [ ... ])
})
```
## Bug Fixes
- 🐛 [Correct where in on conflict in sqlite](https://github.com/drizzle-team/drizzle-orm/pull/1076) - Thanks @hanssonduck!
- 🐛 [Fix libsql/client type import](https://github.com/drizzle-team/drizzle-orm/pull/1122) - Thanks @luisfvieirasilva!
- 🐛 [Fix: raw sql query not being mapped properly on RDS](https://github.com/drizzle-team/drizzle-orm/pull/1071) - Thanks @boian-ivanov
- 🐛 [Fix Datetime mapping for MySQL](https://github.com/drizzle-team/drizzle-orm/pull/1082) - thanks @Angelelz
- 🐛 [Fix smallserial generating as serial](https://github.com/drizzle-team/drizzle-orm/pull/1127) - thanks @L-Mario564
================================================
FILE: changelogs/drizzle-orm/0.29.0.md
================================================
> Drizzle ORM version `0.29.0` will require a minimum Drizzle Kit version of `0.20.0`, and vice versa. Therefore, when upgrading to a newer version of Drizzle ORM, you will also need to upgrade Drizzle Kit. This may result in some breaking changes throughout the versions, especially if you need to upgrade Drizzle Kit and your Drizzle ORM version is older than `<0.28.0`
## New Features
### 🎉 MySQL `unsigned` option for bigint
You can now specify `bigint unsigned` type
```ts
const table = mysqlTable('table', {
id: bigint('id', { mode: 'number', unsigned: true }),
});
```
Read more in [docs](https://orm.drizzle.team/docs/column-types/mysql#bigint)
### 🎉 Improved query builder types
Starting from `0.29.0` by default, as all the query builders in Drizzle try to conform to SQL as much as possible, you can only invoke most of the methods once. For example, in a SELECT statement there might only be one WHERE clause, so you can only invoke .where() once:
```ts
const query = db
.select()
.from(users)
.where(eq(users.id, 1))
.where(eq(users.name, 'John')); // ❌ Type error - where() can only be invoked once
```
This behavior is useful for conventional query building, i.e. when you create the whole query at once. However, it becomes a problem when you want to build a query dynamically, i.e. if you have a shared function that takes a query builder and enhances it. To solve this problem, Drizzle provides a special 'dynamic' mode for query builders, which removes the restriction of invoking methods only once. To enable it, you need to call .$dynamic() on a query builder.
Let's see how it works by implementing a simple withPagination function that adds LIMIT and OFFSET clauses to a query based on the provided page number and an optional page size:
```ts
function withPagination(
qb: T,
page: number,
pageSize: number = 10,
) {
return qb.limit(pageSize).offset(page * pageSize);
}
const query = db.select().from(users).where(eq(users.id, 1));
withPagination(query, 1); // ❌ Type error - the query builder is not in dynamic mode
const dynamicQuery = query.$dynamic();
withPagination(dynamicQuery, 1); // ✅ OK
```
Note that the withPagination function is generic, which allows you to modify the result type of the query builder inside it, for example by adding a join:
```ts
function withFriends(qb: T) {
return qb.leftJoin(friends, eq(friends.userId, users.id));
}
let query = db.select().from(users).where(eq(users.id, 1)).$dynamic();
query = withFriends(query);
```
Read more in [docs](https://orm.drizzle.team/docs/dynamic-query-building)
### 🎉 Possibility to specify name for primary keys and foreign keys
There is an issue when constraint names exceed the 64-character limit of the database. This causes the database engine to truncate the name, potentially leading to issues. Starting from `0.29.0`, you have the option to specify custom names for both `primaryKey()` and `foreignKey()`. We have also deprecated the old `primaryKey()` syntax, which can still be used but will be removed in future releases
```ts
const table = pgTable('table', {
id: integer('id'),
name: text('name'),
}, (table) => ({
cpk: primaryKey({ name: 'composite_key', columns: [table.id, table.name] }),
cfk: foreignKey({
name: 'fkName',
columns: [table.id],
foreignColumns: [table.name],
}),
}));
```
Read more in [docs](https://orm.drizzle.team/docs/indexes-constraints#composite-primary-key)
### 🎉 Read Replicas Support
You can now use the Drizzle `withReplicas` function to specify different database connections for read replicas and the main instance for write operations. By default, `withReplicas` will use a random read replica for read operations and the main instance for all other data modification operations. You can also specify custom logic for choosing which read replica connection to use. You have the freedom to make any weighted, custom decision for that. Here are some usage examples:
```ts
const primaryDb = drizzle(client);
const read1 = drizzle(client);
const read2 = drizzle(client);
const db = withReplicas(primaryDb, [read1, read2]);
// read from primary
db.$primary.select().from(usersTable);
// read from either read1 connection or read2 connection
db.select().from(usersTable)
// use primary database for delete operation
db.delete(usersTable).where(eq(usersTable.id, 1))
```
Implementation example of custom logic for selecting read replicas, where the first replica has a 70% chance of being chosen, and the second replica has a 30% chance of being chosen. Note that you can implement any type of random selection for read replicas
```ts
const db = withReplicas(primaryDb, [read1, read2], (replicas) => {
const weight = [0.7, 0.3];
let cumulativeProbability = 0;
const rand = Math.random();
for (const [i, replica] of replicas.entries()) {
cumulativeProbability += weight[i]!;
if (rand < cumulativeProbability) return replica;
}
return replicas[0]!
});
```
`withReplicas` function is available for all dialects in Drizzle ORM
Read more in [docs](https://orm.drizzle.team/docs/read-replicas)
### 🎉 Set operators support (UNION, UNION ALL, INTERSECT, INTERSECT ALL, EXCEPT, EXCEPT ALL)
Huge thanks to @Angelelz for the significant contribution he made, from API discussions to proper type checks and runtime logic, along with an extensive set of tests. This greatly assisted us in delivering this feature in this release
Usage examples:
All set operators can be used in a two ways: `import approach` or `builder approach`
##### Import approach
```ts
import { union } from 'drizzle-orm/pg-core'
const allUsersQuery = db.select().from(users);
const allCustomersQuery = db.select().from(customers);
const result = await union(allUsersQuery, allCustomersQuery)
```
##### Builder approach
```ts
const result = await db.select().from(users).union(db.select().from(customers));
```
Read more in [docs](https://orm.drizzle.team/docs/set-operations)
### 🎉 New MySQL Proxy Driver
A new driver has been released, allowing you to create your own implementation for an HTTP driver using a MySQL database. You can find usage examples in the `./examples/mysql-proxy` folder
You need to implement two endpoints on your server that will be used for queries and migrations(Migrate endpoint is optional and only if you want to use drizzle migrations). Both the server and driver implementation are up to you, so you are not restricted in any way. You can add custom mappings, logging, and much more
You can find both server and driver implementation examples in the `./examples/mysql-proxy` folder
```ts
// Driver
import axios from 'axios';
import { eq } from 'drizzle-orm/expressions';
import { drizzle } from 'drizzle-orm/mysql-proxy';
import { migrate } from 'drizzle-orm/mysql-proxy/migrator';
import { cities, users } from './schema';
async function main() {
const db = drizzle(async (sql, params, method) => {
try {
const rows = await axios.post(`${process.env.REMOTE_DRIVER}/query`, {
sql,
params,
method,
});
return { rows: rows.data };
} catch (e: any) {
console.error('Error from pg proxy server:', e.response.data);
return { rows: [] };
}
});
await migrate(db, async (queries) => {
try {
await axios.post(`${process.env.REMOTE_DRIVER}/migrate`, { queries });
} catch (e) {
console.log(e);
throw new Error('Proxy server cannot run migrations');
}
}, { migrationsFolder: 'drizzle' });
await db.insert(cities).values({ id: 1, name: 'name' });
await db.insert(users).values({
id: 1,
name: 'name',
email: 'email',
cityId: 1,
});
const usersToCityResponse = await db.select().from(users).leftJoin(
cities,
eq(users.cityId, cities.id),
);
}
```
### 🎉 New PostgreSQL Proxy Driver
Same as MySQL you can now implement your own http driver for PostgreSQL database. You can find usage examples in the `./examples/pg-proxy` folder
You need to implement two endpoints on your server that will be used for queries and migrations (Migrate endpoint is optional and only if you want to use drizzle migrations). Both the server and driver implementation are up to you, so you are not restricted in any way. You can add custom mappings, logging, and much more
You can find both server and driver implementation examples in the `./examples/pg-proxy` folder
```ts
import axios from 'axios';
import { eq } from 'drizzle-orm/expressions';
import { drizzle } from 'drizzle-orm/pg-proxy';
import { migrate } from 'drizzle-orm/pg-proxy/migrator';
import { cities, users } from './schema';
async function main() {
const db = drizzle(async (sql, params, method) => {
try {
const rows = await axios.post(`${process.env.REMOTE_DRIVER}/query`, { sql, params, method });
return { rows: rows.data };
} catch (e: any) {
console.error('Error from pg proxy server:', e.response.data);
return { rows: [] };
}
});
await migrate(db, async (queries) => {
try {
await axios.post(`${process.env.REMOTE_DRIVER}/query`, { queries });
} catch (e) {
console.log(e);
throw new Error('Proxy server cannot run migrations');
}
}, { migrationsFolder: 'drizzle' });
const insertedCity = await db.insert(cities).values({ id: 1, name: 'name' }).returning();
const insertedUser = await db.insert(users).values({ id: 1, name: 'name', email: 'email', cityId: 1 });
const usersToCityResponse = await db.select().from(users).leftJoin(cities, eq(users.cityId, cities.id));
}
```
### 🎉 `D1` Batch API support
Reference: https://developers.cloudflare.com/d1/platform/client-api/#dbbatch
Batch API usage example:
```ts
const batchResponse = await db.batch([
db.insert(usersTable).values({ id: 1, name: 'John' }).returning({
id: usersTable.id,
}),
db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)),
db.query.usersTable.findMany({}),
db.select().from(usersTable).where(eq(usersTable.id, 1)),
db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(
usersTable,
),
]);
```
Type for `batchResponse` in this example would be:
```ts
type BatchResponse = [
{
id: number;
}[],
D1Result,
{
id: number;
name: string;
verified: number;
invitedBy: number | null;
}[],
{
id: number;
name: string;
verified: number;
invitedBy: number | null;
}[],
{
id: number;
invitedBy: number | null;
}[],
];
```
All possible builders that can be used inside `db.batch`:
```ts
`db.all()`,
`db.get()`,
`db.values()`,
`db.run()`,
`db.query.<table>.findMany()`,
`db.query.<table>.findFirst()`,
`db.select()...`,
`db.update()...`,
`db.delete()...`,
`db.insert()...`,
```
More usage examples here: [integration-tests/tests/d1-batch.test.ts](https://github.com/drizzle-team/drizzle-orm/blob/beta/integration-tests/tests/d1-batch.test.ts) and in [docs](https://orm.drizzle.team/docs/batch-api)
---
## Drizzle Kit 0.20.0
1. New way to define drizzle.config using `defineConfig` function
2. Possibility to access Cloudflare D1 with Drizzle Studio using wrangler.toml file
3. Drizzle Studio is migrating to https://local.drizzle.studio/
4. `bigint unsigned` support
5. `primaryKeys` and `foreignKeys` now can have custom names
6. Environment variables are now automatically fetched
7. Some bug fixes and improvements
You can read more about drizzle-kit updates [here](https://github.com/drizzle-team/drizzle-kit-mirror/releases/tag/v0.20.0)
================================================
FILE: changelogs/drizzle-orm/0.29.1.md
================================================
# Fixes
- Forward args correctly when using withReplicas feature #1536. Thanks @Angelelz
- Fix selectDistinctOn not working with multiple columns #1466. Thanks @L-Mario564
# New Features/Helpers
## 🎉 Detailed JSDoc for all query builders in all dialects - thanks @realmikesolo
You can now access more information, hints, documentation links, etc. while developing and using JSDoc right in your IDE. Previously, we had them only for filter expressions, but now you can see them for all parts of the Drizzle query builder
## 🎉 New helpers for aggregate functions in SQL - thanks @L-Mario564
> Remember, aggregation functions are often used with the GROUP BY clause of the SELECT statement. So if you are selecting using aggregating functions and other columns in one query,
be sure to use the `.groupBy` clause
Here is a list of functions and equivalent using `sql` template
**count**
```ts
await db.select({ value: count() }).from(users);
await db.select({ value: count(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`count(*)`.mapWith(Number)
}).from(users);
await db.select({
value: sql`count(${users.id})`.mapWith(Number)
}).from(users);
```
**countDistinct**
```ts
await db.select({ value: countDistinct(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`count(${users.id})`.mapWith(Number)
}).from(users);
```
**avg**
```ts
await db.select({ value: avg(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`avg(${users.id})`.mapWith(String)
}).from(users);
```
**avgDistinct**
```ts
await db.select({ value: avgDistinct(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`avg(distinct ${users.id})`.mapWith(String)
}).from(users);
```
**sum**
```ts
await db.select({ value: sum(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`sum(${users.id})`.mapWith(String)
}).from(users);
```
**sumDistinct**
```ts
await db.select({ value: sumDistinct(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`sum(distinct ${users.id})`.mapWith(String)
}).from(users);
```
**max**
```ts
await db.select({ value: max(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`max(${users.id})`.mapWith(users.id)
}).from(users);
```
**min**
```ts
await db.select({ value: min(users.id) }).from(users);
// It's equivalent to writing
await db.select({
value: sql`min(${users.id})`.mapWith(users.id)
}).from(users);
```
# New Packages
## 🎉 ESLint Drizzle Plugin
For cases where it's impossible to perform type checks for specific scenarios, or where it's possible but error messages would be challenging to understand, we've decided to create an ESLint package with recommended rules. This package aims to assist developers in handling crucial scenarios during development
> Big thanks to @Angelelz for initiating the development of this package and transferring it to the Drizzle Team's npm
## Install
```sh
[ npm | yarn | pnpm | bun ] install eslint eslint-plugin-drizzle
```
You can install those packages for typescript support in your IDE
```sh
[ npm | yarn | pnpm | bun ] install @typescript-eslint/eslint-plugin @typescript-eslint/parser
```
## Usage
Create a `.eslintrc.yml` file, add `drizzle` to the `plugins`, and specify the rules you want to use. You can find a list of all existing rules below
```yml
root: true
parser: '@typescript-eslint/parser'
parserOptions:
project: './tsconfig.json'
plugins:
- drizzle
rules:
'drizzle/enforce-delete-with-where': "error"
'drizzle/enforce-update-with-where': "error"
```
### All config
This plugin exports an [`all` config](src/configs/all.js) that makes use of all rules (except for deprecated ones).
```yml
root: true
extends:
- "plugin:drizzle/all"
parser: '@typescript-eslint/parser'
parserOptions:
project: './tsconfig.json'
plugins:
- drizzle
```
At the moment, `all` is equivalent to `recommended`
```yml
root: true
extends:
- "plugin:drizzle/recommended"
parser: '@typescript-eslint/parser'
parserOptions:
project: './tsconfig.json'
plugins:
- drizzle
```
## Rules
**enforce-delete-with-where**: Enforce using `delete` with the `.where()` clause in the `.delete()` statement. Most of the time, you don't need to delete all rows in the table and require some kind of `WHERE` statements.
**Error Message**:
```
Without `.where(...)` you will delete all the rows in a table. If you didn't want to do it, please use `db.delete(...).where(...)` instead. Otherwise you can ignore this rule here
```
Optionally, you can define a `drizzleObjectName` in the plugin options that accepts a `string` or `string[]`. This is useful when you have objects or classes with a delete method that's not from Drizzle. Such a `delete` method will trigger the ESLint rule. To avoid that, you can define the name of the Drizzle object that you use in your codebase (like db) so that the rule would only trigger if the delete method comes from this object:
Example, config 1:
```json
"rules": {
"drizzle/enforce-delete-with-where": ["error"]
}
```
```ts
class MyClass {
public delete() {
return {}
}
}
const myClassObj = new MyClass();
// ---> Will be triggered by ESLint Rule
myClassObj.delete()
const db = drizzle(...)
// ---> Will be triggered by ESLint Rule
db.delete()
```
Example, config 2:
```json
"rules": {
"drizzle/enforce-delete-with-where": ["error", { "drizzleObjectName": ["db"] }],
}
```
```ts
class MyClass {
public delete() {
return {}
}
}
const myClassObj = new MyClass();
// ---> Will NOT be triggered by ESLint Rule
myClassObj.delete()
const db = drizzle(...)
// ---> Will be triggered by ESLint Rule
db.delete()
```
**enforce-update-with-where**: Enforce using `update` with the `.where()` clause in the `.update()` statement. Most of the time, you don't need to update all rows in the table and require some kind of `WHERE` statements.
**Error Message**:
```
Without `.where(...)` you will update all the rows in a table. If you didn't want to do it, please use `db.update(...).set(...).where(...)` instead. Otherwise you can ignore this rule here
```
Optionally, you can define a `drizzleObjectName` in the plugin options that accepts a `string` or `string[]`. This is useful when you have objects or classes with an update method that's not from Drizzle. Such an `update` method will trigger the ESLint rule. To avoid that, you can define the name of the Drizzle object that you use in your codebase (like db) so that the rule would only trigger if the update method comes from this object:
Example, config 1:
```json
"rules": {
"drizzle/enforce-update-with-where": ["error"]
}
```
```ts
class MyClass {
public update() {
return {}
}
}
const myClassObj = new MyClass();
// ---> Will be triggered by ESLint Rule
myClassObj.update()
const db = drizzle(...)
// ---> Will be triggered by ESLint Rule
db.update()
```
Example, config 2:
```json
"rules": {
"drizzle/enforce-update-with-where": ["error", { "drizzleObjectName": ["db"] }],
}
```
```ts
class MyClass {
public update() {
return {}
}
}
const myClassObj = new MyClass();
// ---> Will NOT be triggered by ESLint Rule
myClassObj.update()
const db = drizzle(...)
// ---> Will be triggered by ESLint Rule
db.update()
```
================================================
FILE: changelogs/drizzle-orm/0.29.2.md
================================================
## Fixes
- Added improvements to the planetscale relational tests #1579 - thanks @Angelelz
- [Pg] FIX: correct string escaping for empty PgArrays #1640 - thanks @Angelelz
- Fix wrong syntax for exists fn in sqlite #1647 - thanks @Angelelz
- Properly handle dates in AWS Data API
- Fix Hermes mixins constructor issue
## ESLint Drizzle Plugin, v0.2.3
```
npm i eslint-plugin-drizzle@0.2.3
```
🎉 **[ESLint] Add support for functions and improve error messages #1586 - thanks @ngregrichardson**
- Allowed the Drizzle object to be a function or to be retrieved from a function, e.g.
- Added better context to the suggestion in the error message.
## New Drivers
### 🎉 Expo SQLite Driver is available
For starting with Expo SQLite Driver, you need to install `expo-sqlite` and `drizzle-orm` packages.
```bash
npm install drizzle-orm expo-sqlite@next
```
Then, you can use it like this:
```ts
import { drizzle } from "drizzle-orm/expo-sqlite";
import { openDatabaseSync } from "expo-sqlite/next";
const expoDb = openDatabaseSync("db.db");
const db = drizzle(expoDb);
await db.select().from(...)...
// or
db.select().from(...).then(...);
// or
db.select().from(...).all();
```
If you want to use Drizzle Migrations, you need to update babel and metro configuration files.
1. Install `babel-plugin-inline-import` package.
```bash
npm install babel-plugin-inline-import
```
2. Update `babel.config.js` and `metro.config.js` files.
babel.config.js
```diff
module.exports = function(api) {
api.cache(true);
return {
presets: ['babel-preset-expo'],
+ plugins: [["inline-import", { "extensions": [".sql"] }]]
};
};
```
metro.config.js
```diff
const { getDefaultConfig } = require('expo/metro-config');
/** @type {import('expo/metro-config').MetroConfig} */
const config = getDefaultConfig(__dirname);
+config.resolver.sourceExts.push('sql');
module.exports = config;
```
3. Create `drizzle.config.ts` file in your project root folder.
```ts
import type { Config } from 'drizzle-kit';
export default {
schema: './db/schema.ts',
out: './drizzle',
driver: 'expo',
} satisfies Config;
```
After creating schema file and drizzle.config.ts file, you can generate migrations like this:
```bash
npx drizzle-kit generate:sqlite
```
Then you need to import `migrations.js` file in your `App.tsx` file from `./drizzle` folder and use hook `useMigrations` or `migrate` function.
```tsx
import { drizzle } from "drizzle-orm/expo-sqlite";
import { openDatabaseSync } from "expo-sqlite/next";
import { useMigrations } from 'drizzle-orm/expo-sqlite/migrator';
import migrations from './drizzle/migrations';
const expoDb = openDatabaseSync("db.db");
const db = drizzle(expoDb);
export default function App() {
const { success, error } = useMigrations(db, migrations);
if (error) {
return (
Migration error: {error.message}
);
}
if (!success) {
return (
Migration is in progress...
);
}
return ...your application component;
}
```
================================================
FILE: changelogs/drizzle-orm/0.29.3.md
================================================
- fix: make expo peer dependencies optional #1714
================================================
FILE: changelogs/drizzle-orm/0.29.4.md
================================================
## New Features
### 🎉 **Neon HTTP Batch**
For more info you can check [Neon docs](https://neon.tech/docs/serverless/serverless-driver#issue-multiple-queries-with-the-transaction-function)
**Example**
```ts
const batchResponse: BatchType = await db.batch([
db.insert(usersTable).values({ id: 1, name: 'John' }).returning({
id: usersTable.id,
}),
db.insert(usersTable).values({ id: 2, name: 'Dan' }),
db.query.usersTable.findMany({}),
db.query.usersTable.findFirst({}),
]);
```
```ts
type BatchType = [
{
id: number;
}[],
NeonHttpQueryResult,
{
id: number;
name: string;
verified: number;
invitedBy: number | null;
}[],
{
id: number;
name: string;
verified: number;
invitedBy: number | null;
} | undefined,
];
```
## Improvements
Thanks to the `database-js` and `PlanetScale` teams, we have updated the default behavior and instances of `database-js`.
As suggested by the `database-js` core team, you should use the `Client` instance instead of `connect()`:
```typescript
import { Client } from '@planetscale/database';
import { drizzle } from 'drizzle-orm/planetscale-serverless';
// create the connection
const client = new Client({
host: process.env['DATABASE_HOST'],
username: process.env['DATABASE_USERNAME'],
password: process.env['DATABASE_PASSWORD'],
});
const db = drizzle(client);
```
> Warning: In this version, there are no breaking changes, but starting from version `0.30.0`, you will encounter an error if you attempt to use anything other than a `Client` instance.
>
> We suggest starting to change connections to PlanetScale now to prevent any runtime errors in the future.
Previously our docs stated to use `connect()` and only the result of this function could be passed to drizzle. In this release we are adding support for `new Client()` and deprecating `connect()`, as suggested by the `database-js` team. In this release you will see a `warning` when trying to pass the result of a `connect()` function call:
**Warning text**
```mdx
Warning: You need to pass an instance of Client:
import { Client } from "@planetscale/database";
const client = new Client({
host: process.env["DATABASE_HOST"],
username: process.env["DATABASE_USERNAME"],
password: process.env["DATABASE_PASSWORD"],
});
const db = drizzle(client);
Starting from version 0.30.0, you will encounter an error if you attempt to use anything other than a Client instance.
Please make the necessary changes now to prevent any runtime errors in the future
```
================================================
FILE: changelogs/drizzle-orm/0.29.5.md
================================================
## New Features
### 🎉 WITH UPDATE, WITH DELETE, WITH INSERT - thanks @L-Mario564
You can now use `WITH` statements with [INSERT](https://orm.drizzle.team/docs/insert#with-insert-clause), [UPDATE](https://orm.drizzle.team/docs/update#with-update-clause) and [DELETE](https://orm.drizzle.team/docs/delete#with-delete-clause) statements
Usage examples
```ts
const averageAmount = db.$with('average_amount').as(
db.select({ value: sql`avg(${orders.amount})`.as('value') }).from(orders),
);
const result = await db
.with(averageAmount)
.delete(orders)
.where(gt(orders.amount, sql`(select * from ${averageAmount})`))
.returning({
id: orders.id,
});
```
Generated SQL:
```sql
with "average_amount" as (select avg("amount") as "value" from "orders")
delete from "orders"
where "orders"."amount" > (select * from "average_amount")
returning "id"
```
For more examples for all statements, check docs:
- [with insert docs](https://orm.drizzle.team/docs/insert#with-insert-clause)
- [with update docs](https://orm.drizzle.team/docs/update#with-update-clause)
- [with delete docs](https://orm.drizzle.team/docs/delete#with-delete-clause)
### 🎉 Possibility to specify custom schema and custom name for migrations table - thanks @g3r4n
- **Custom table for migrations**
By default, all information about executed migrations will be stored in the database inside the `__drizzle_migrations` table,
and for PostgreSQL, inside the `drizzle` schema. However, you can configure where to store those records.
To add a custom table name for migrations stored inside your database, you should use the `migrationsTable` option
Usage example
```ts
await migrate(db, {
migrationsFolder: './drizzle',
migrationsTable: 'my_migrations',
});
```
- **Custom schema for migrations**
> Works only with PostgreSQL databases
To add a custom schema name for migrations stored inside your database, you should use the `migrationsSchema` option
Usage example
```ts
await migrate(db, {
migrationsFolder: './drizzle',
migrationsSchema: 'custom',
});
```
### 🎉 SQLite Proxy batch and Relational Queries support
- You can now use `.query.findFirst` and `.query.findMany` syntax with sqlite proxy driver
- SQLite Proxy supports batch requests, the same as it's done for all other drivers. Check full [docs](https://orm.drizzle.team/docs/batch-api)
You will need to specify a specific callback for batch queries and handle requests to proxy server:
```ts
import { drizzle } from 'drizzle-orm/sqlite-proxy';
type ResponseType = { rows: any[][] | any[] }[];
const db = drizzle(
async (sql, params, method) => {
// single query logic
},
// new batch callback
async (
queries: {
sql: string;
params: any[];
method: 'all' | 'run' | 'get' | 'values';
}[],
) => {
try {
const result: ResponseType = await axios.post(
'http://localhost:3000/batch',
{ queries },
);
return result;
} catch (e: any) {
console.error('Error from sqlite proxy server:', e);
throw e;
}
},
);
```
And then you can use `db.batch([])` method, that will proxy all queries
> Response from the batch should be an array of raw values (an array within an array), in the same order as they were sent to the proxy server
================================================
FILE: changelogs/drizzle-orm/0.30.0.md
================================================
## Breaking Changes
The Postgres timestamp mapping has been changed to align all drivers with the same behavior.
❗ We've modified the `postgres.js` driver instance to always return strings for dates, and then Drizzle will provide you with either strings or mapped dates, depending on the selected `mode`. The only issue you may encounter is that once you provide the `postgres.js` driver instance inside Drizzle, the behavior of this object will change for dates, which will always be strings.
We've made this change as a minor release, just as a warning, that:
- If you were using timestamps and were waiting for a specific response, the behavior will now be changed.
When mapping to the driver, we will always use `.toISOString` for both timestamps with timezone and without timezone.
- If you were using the `postgres.js` driver outside of Drizzle, all `postgres.js` clients passed to Drizzle will have mutated behavior for dates. All dates will be strings in the response.
Parsers that were changed for `postgres.js`.
```ts
const transparentParser = (val: any) => val;
// Override postgres.js default date parsers: https://github.com/porsager/postgres/discussions/761
for (const type of ['1184', '1082', '1083', '1114']) {
client.options.parsers[type as any] = transparentParser;
client.options.serializers[type as any] = transparentParser;
}
```
Ideally, as is the case with almost all other drivers, we should have the possibility to mutate mappings on a per-query basis, which means that the driver client won't be mutated. We will be reaching out to the creator of the `postgres.js` library to inquire about the possibility of specifying per-query mapping interceptors and making this flow even better for all users.
If we've overlooked this capability and it is already available with `postgres.js`, please ping us in our Discord!
A few more references for timestamps without and with timezones can be found in our [docs](http://orm.drizzle.team/docs/column-types/pg#timestamp)
## Bug fixed in this release
- [BUG]: timestamp with mode string is returned as Date object instead of string - #806
- [BUG]: Dates are always dates #971
- [BUG]: Inconsistencies when working with timestamps and corresponding datetime objects in javascript. #1176
- [BUG]: timestamp columns showing string type, however actually returning a Date object. #1185
- [BUG]: Wrong data type for postgres date column #1407
- [BUG]: invalid timestamp conversion when using PostgreSQL with TimeZone set to UTC #1587
- [BUG]: Postgres insert into timestamp with time zone removes milliseconds #1061
- [BUG]: update timestamp field (using AWS Data API) #1164
- [BUG]: Invalid date from relational queries #895
================================================
FILE: changelogs/drizzle-orm/0.30.1.md
================================================
## New Features
### 🎉 OP-SQLite driver Support
Usage Example
```ts
import { open } from '@op-engineering/op-sqlite';
import { drizzle } from 'drizzle-orm/op-sqlite';
const opsqlite = open({
name: 'myDB',
});
const db = drizzle(opsqlite);
await db.select().from(users);
```
For more usage and setup details, please check our [op-sqlite docs](http://orm.drizzle.team/docs/get-started-sqlite#op-sqlite)
### Bug fixes
- Migration hook fixed for Expo driver
================================================
FILE: changelogs/drizzle-orm/0.30.10.md
================================================
## New Features
### 🎉 `.if()` function added to all WHERE expressions
#### Select all users after cursors if a cursor value was provided
```ts
function getUsersAfter(cursor?: number) {
return db.select().from(users).where(
gt(users.id, cursor).if(cursor)
);
}
```
## Bug Fixes
- Fixed internal mappings for sessions `.all`, `.values`, `.execute` functions in AWS DataAPI
================================================
FILE: changelogs/drizzle-orm/0.30.2.md
================================================
## Improvements
LibSQL migrations have been updated to utilize batch execution instead of transactions. As stated in the [documentation](https://docs.turso.tech/sdk/ts/reference#batch-transactions), LibSQL now supports batch operations
> A batch consists of multiple SQL statements executed sequentially within an implicit transaction. The backend handles the transaction: success commits all changes, while any failure results in a full rollback with no modifications.
## Bug fixed
- [Sqlite] Fix findFirst query for bun:sqlite #1885 - thanks @shaileshaanand
================================================
FILE: changelogs/drizzle-orm/0.30.3.md
================================================
- 🎉 Added raw query support (`db.execute(...)`) to batch API in Neon HTTP driver
- 🐛 Fixed `@neondatabase/serverless` HTTP driver types issue (#1945, neondatabase/serverless#66)
- 🐛 Fixed sqlite-proxy driver `.run()` result
================================================
FILE: changelogs/drizzle-orm/0.30.4.md
================================================
## New Features
### 🎉 xata-http driver support
According to their **[official website](https://xata.io)**, Xata is a Postgres data platform with a focus on reliability, scalability, and developer experience. The Xata Postgres service is currently in beta, please see the [Xata docs](https://xata.io/docs/postgres) on how to enable it in your account.
Drizzle ORM natively supports both the `xata` driver with `drizzle-orm/xata` package and the **[`postgres`](#postgresjs)** or **[`pg`](#node-postgres)** drivers for accessing a Xata Postgres database.
The following example uses the Xata generated client, which you obtain by running the [xata init](https://xata.io/docs/getting-started/installation) CLI command.
```bash
pnpm add drizzle-orm @xata.io/client
```
```ts
import { drizzle } from 'drizzle-orm/xata-http';
import { getXataClient } from './xata'; // Generated client
const xata = getXataClient();
const db = drizzle(xata);
const result = await db.select().from(...);
```
You can also connect to Xata using `pg` or `postgres.js` drivers
================================================
FILE: changelogs/drizzle-orm/0.30.5.md
================================================
## New Features
### 🎉 `$onUpdate` functionality for PostgreSQL, MySQL and SQLite
Adds a dynamic update value to the column.
The function will be called when the row is updated, and the returned value will be used as the column value if none is provided.
If no `default` (or `$defaultFn`) value is provided, the function will be called when the row is inserted as well, and the returned value will be used as the column value.
> Note: This value does not affect the `drizzle-kit` behavior, it is only used at runtime in `drizzle-orm`.
```ts
const usersOnUpdate = pgTable('users_on_update', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`),
updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()),
alwaysNull: text('always_null').$type<string | null>().$onUpdate(() => null),
});
```
## Fixes
- [BUG]: insertions on columns with the smallserial datatype are not optional - #1848
Thanks @Angelelz and @gabrielDonnantuoni!
================================================
FILE: changelogs/drizzle-orm/0.30.6.md
================================================
## New Features
### 🎉 PGlite driver Support
PGlite is a WASM Postgres build packaged into a TypeScript client library that enables you to run Postgres in the browser, Node.js and Bun, with no need to install any other dependencies. It is only 2.6mb gzipped.
It can be used as an ephemeral in-memory database, or with persistence either to the file system (Node/Bun) or indexedDB (Browser).
Unlike previous "Postgres in the browser" projects, PGlite does not use a Linux virtual machine - it is simply Postgres in WASM.
Usage Example
```ts
import { PGlite } from '@electric-sql/pglite';
import { drizzle } from 'drizzle-orm/pglite';
// In-memory Postgres
const client = new PGlite();
const db = drizzle(client);
await db.select().from(users);
```
---
There are currently 2 limitations, that should be fixed on Pglite side:
- [Attempting to refresh a materialised view throws error](https://github.com/electric-sql/pglite/issues/63)
- [Attempting to SET TIME ZONE throws error](https://github.com/electric-sql/pglite/issues/62)
================================================
FILE: changelogs/drizzle-orm/0.30.7-preview.md
================================================
- 🎉 Added custom schema support to enums in Postgres:
```ts
import { pgSchema } from 'drizzle-orm/pg-core';
const mySchema = pgSchema('mySchema');
const colors = mySchema.enum('colors', ['red', 'green', 'blue']);
```
- 🐛 Split `where` clause in Postgres `.onConflictDoUpdate` method into `setWhere` and `targetWhere` clauses, to support both `where` cases in `on conflict ...` clause (#1628, #1302)
- 🐛 Fix query generation for `where` clause in Postgres `.onConflictDoNothing` method, as it was placed in a wrong spot (#1628)
================================================
FILE: changelogs/drizzle-orm/0.30.7.md
================================================
## Bug fixes
- Add mappings for `@vercel/postgres` package
- Fix interval mapping for `neon` drivers - #1542
================================================
FILE: changelogs/drizzle-orm/0.30.8.md
================================================
- 🎉 Added custom schema support to enums in Postgres (fixes #669 via #2048):
```ts
import { pgSchema } from 'drizzle-orm/pg-core';
const mySchema = pgSchema('mySchema');
const colors = mySchema.enum('colors', ['red', 'green', 'blue']);
```
- 🎉 Changed D1 `migrate()` function to use batch API (#2137)
- 🐛 Split `where` clause in Postgres `.onConflictDoUpdate` method into `setWhere` and `targetWhere` clauses, to support both `where` cases in `on conflict ...` clause (fixes #1628, #1302 via #2056)
- 🐛 Fixed query generation for `where` clause in Postgres `.onConflictDoNothing` method, as it was placed in a wrong spot (fixes #1628 via #2056)
- 🐛 Fixed multiple issues with AWS Data API driver (fixes #1931, #1932, #1934, #1936 via #2119)
- 🐛 Fix inserting and updating array values in AWS Data API (fixes #1912 via #1911)
Thanks @hugo082 and @livingforjesus!
================================================
FILE: changelogs/drizzle-orm/0.30.9.md
================================================
- 🐛 Fixed migrator in AWS Data API
- Added `setWhere` and `targetWhere` fields to `.onConflictDoUpdate()` config in SQLite instead of single `where` field
- 🛠️ Added schema information to Drizzle instances via `db._.fullSchema`
================================================
FILE: changelogs/drizzle-orm/0.31.0-beta.md
================================================
## Breaking changes
### PostgreSQL indexes API was changed
The previous Drizzle+PostgreSQL indexes API was incorrect and was not aligned with the PostgreSQL documentation. The good thing is that it was not used in queries, and drizzle-kit didn't support all properties for indexes. This means we can now change the API to the correct one and provide full support for it in drizzle-kit
Previous API
- No way to define SQL expressions inside `.on`.
- `.using` and `.on` in our case are the same thing, so the API is incorrect here.
- `.asc()`, `.desc()`, `.nullsFirst()`, and `.nullsLast()` should be specified for each column or expression on indexes, but not on an index itself.
```ts
// Index declaration reference
index('name')
.on(table.column1, table.column2, ...) or .onOnly(table.column1, table.column2, ...)
.concurrently()
.using(sql``) // sql expression
.asc() or .desc()
.nullsFirst() or .nullsLast()
.where(sql``) // sql expression
```
Current API
```ts
// First example, with `.on()`
index('name')
.on(table.column1.asc(), table.column2.nullsFirst(), ...) or .onOnly(table.column1.desc().nullsLast(), table.column2, ...)
.concurrently()
.where(sql``)
.with({ fillfactor: '70' })
// Second Example, with `.using()`
index('name')
.using('btree', table.column1.asc(), sql`lower(${table.column2})`, table.column1.op('text_ops'))
.where(sql``) // sql expression
.with({ fillfactor: '70' })
```
## New Features
### 🎉 "pg_vector" extension support
> There is no specific code to create an extension inside the Drizzle schema. We assume that if you are using vector types, indexes, and queries, you have a PostgreSQL database with the `pg_vector` extension installed.
You can now specify indexes for `pg_vector` and utilize `pg_vector` functions for querying, ordering, etc.
Let's take a few examples of `pg_vector` indexes from the `pg_vector` docs and translate them to Drizzle
#### L2 distance, Inner product and Cosine distance
```ts
// CREATE INDEX ON items USING hnsw (embedding vector_l2_ops);
// CREATE INDEX ON items USING hnsw (embedding vector_ip_ops);
// CREATE INDEX ON items USING hnsw (embedding vector_cosine_ops);
const table = pgTable('items', {
embedding: vector('embedding', { dimensions: 3 })
}, (table) => ({
l2: index('l2_index').using('hnsw', table.embedding.op('vector_l2_ops')),
ip: index('ip_index').using('hnsw', table.embedding.op('vector_ip_ops')),
cosine: index('cosine_index').using('hnsw', table.embedding.op('vector_cosine_ops'))
}))
```
#### L1 distance, Hamming distance and Jaccard distance - added in pg_vector 0.7.0 version
```ts
// CREATE INDEX ON items USING hnsw (embedding vector_l1_ops);
// CREATE INDEX ON items USING hnsw (embedding bit_hamming_ops);
// CREATE INDEX ON items USING hnsw (embedding bit_jaccard_ops);
const table = pgTable('table', {
embedding: vector('embedding', { dimensions: 3 })
}, (table) => ({
l1: index('l1_index').using('hnsw', table.embedding.op('vector_l1_ops')),
hamming: index('hamming_index').using('hnsw', table.embedding.op('bit_hamming_ops')),
bit: index('bit_jaccard_index').using('hnsw', table.embedding.op('bit_jaccard_ops'))
}))
```
For queries, you can use predefined functions for vectors or create custom ones using the SQL template operator.
You can also use the following helpers:
```ts
import { l2Distance, l1Distance, innerProduct,
cosineDistance, hammingDistance, jaccardDistance } from 'drizzle-orm'
l2Distance(table.column, [3, 1, 2]) // table.column <-> '[3, 1, 2]'
l1Distance(table.column, [3, 1, 2]) // table.column <+> '[3, 1, 2]'
innerProduct(table.column, [3, 1, 2]) // table.column <#> '[3, 1, 2]'
cosineDistance(table.column, [3, 1, 2]) // table.column <=> '[3, 1, 2]'
hammingDistance(table.column, '101') // table.column <~> '101'
jaccardDistance(table.column, '101') // table.column <%> '101'
```
If `pg_vector` has some other functions to use, you can replicate the implementation from an existing one we have. Here is how it can be done
```ts
export function l2Distance(
column: SQLWrapper | AnyColumn,
value: number[] | string[] | TypedQueryBuilder | string,
): SQL {
if (is(value, TypedQueryBuilder) || typeof value === 'string') {
return sql`${column} <-> ${value}`;
}
return sql`${column} <-> ${JSON.stringify(value)}`;
}
```
Name it as you wish and change the operator. This example allows for a numbers array, strings array, string, or even a select query. Feel free to create any other type you want or even contribute and submit a PR
#### Examples
Let's take a few examples of `pg_vector` queries from the `pg_vector` docs and translate them to Drizzle
```ts
import { l2Distance } from 'drizzle-orm';
// SELECT * FROM items ORDER BY embedding <-> '[3,1,2]' LIMIT 5;
db.select().from(items).orderBy(l2Distance(items.embedding, [3,1,2]))
// SELECT embedding <-> '[3,1,2]' AS distance FROM items;
db.select({ distance: l2Distance(items.embedding, [3,1,2]) }).from(items)
// SELECT * FROM items ORDER BY embedding <-> (SELECT embedding FROM items WHERE id = 1) LIMIT 5;
const subquery = db.select({ embedding: items.embedding }).from(items).where(eq(items.id, 1));
db.select().from(items).orderBy(l2Distance(items.embedding, subquery)).limit(5)
// SELECT (embedding <#> '[3,1,2]') * -1 AS inner_product FROM items;
db.select({ innerProduct: sql`(${innerProduct(items.embedding, [3,1,2])}) * -1` }).from(items)
// and more!
```
- 🛠️ Fixed RQB behavior for tables with same names in different schemas
================================================
FILE: changelogs/drizzle-orm/0.31.0.md
================================================
## Breaking changes
> Note: `drizzle-orm@0.31.0` can be used with `drizzle-kit@0.22.0` or higher. The same applies to Drizzle Kit. If you run a Drizzle Kit command, it will check and prompt you for an upgrade (if needed). You can check for Drizzle Kit updates. [below](#drizzle-kit-updates-drizzle-kit0220)
### PostgreSQL indexes API was changed
The previous Drizzle+PostgreSQL indexes API was incorrect and was not aligned with the PostgreSQL documentation. The good thing is that it was not used in queries, and drizzle-kit didn't support all properties for indexes. This means we can now change the API to the correct one and provide full support for it in drizzle-kit
Previous API
- No way to define SQL expressions inside `.on`.
- `.using` and `.on` in our case are the same thing, so the API is incorrect here.
- `.asc()`, `.desc()`, `.nullsFirst()`, and `.nullsLast()` should be specified for each column or expression on indexes, but not on an index itself.
```ts
// Index declaration reference
index('name')
.on(table.column1, table.column2, ...) or .onOnly(table.column1, table.column2, ...)
.concurrently()
.using(sql``) // sql expression
.asc() or .desc()
.nullsFirst() or .nullsLast()
.where(sql``) // sql expression
```
Current API
```ts
// First example, with `.on()`
index('name')
.on(table.column1.asc(), table.column2.nullsFirst(), ...) or .onOnly(table.column1.desc().nullsLast(), table.column2, ...)
.concurrently()
.where(sql``)
.with({ fillfactor: '70' })
// Second Example, with `.using()`
index('name')
.using('btree', table.column1.asc(), sql`lower(${table.column2})`, table.column1.op('text_ops'))
.where(sql``) // sql expression
.with({ fillfactor: '70' })
```
## New Features
### 🎉 "pg_vector" extension support
> There is no specific code to create an extension inside the Drizzle schema. We assume that if you are using vector types, indexes, and queries, you have a PostgreSQL database with the `pg_vector` extension installed.
You can now specify indexes for `pg_vector` and utilize `pg_vector` functions for querying, ordering, etc.
Let's take a few examples of `pg_vector` indexes from the `pg_vector` docs and translate them to Drizzle
#### L2 distance, Inner product and Cosine distance
```ts
// CREATE INDEX ON items USING hnsw (embedding vector_l2_ops);
// CREATE INDEX ON items USING hnsw (embedding vector_ip_ops);
// CREATE INDEX ON items USING hnsw (embedding vector_cosine_ops);
const table = pgTable('items', {
embedding: vector('embedding', { dimensions: 3 })
}, (table) => ({
l2: index('l2_index').using('hnsw', table.embedding.op('vector_l2_ops')),
ip: index('ip_index').using('hnsw', table.embedding.op('vector_ip_ops')),
cosine: index('cosine_index').using('hnsw', table.embedding.op('vector_cosine_ops'))
}))
```
#### L1 distance, Hamming distance and Jaccard distance - added in pg_vector 0.7.0 version
```ts
// CREATE INDEX ON items USING hnsw (embedding vector_l1_ops);
// CREATE INDEX ON items USING hnsw (embedding bit_hamming_ops);
// CREATE INDEX ON items USING hnsw (embedding bit_jaccard_ops);
const table = pgTable('table', {
embedding: vector('embedding', { dimensions: 3 })
}, (table) => ({
l1: index('l1_index').using('hnsw', table.embedding.op('vector_l1_ops')),
hamming: index('hamming_index').using('hnsw', table.embedding.op('bit_hamming_ops')),
bit: index('bit_jaccard_index').using('hnsw', table.embedding.op('bit_jaccard_ops'))
}))
```
For queries, you can use predefined functions for vectors or create custom ones using the SQL template operator.
You can also use the following helpers:
```ts
import { l2Distance, l1Distance, innerProduct,
cosineDistance, hammingDistance, jaccardDistance } from 'drizzle-orm'
l2Distance(table.column, [3, 1, 2]) // table.column <-> '[3, 1, 2]'
l1Distance(table.column, [3, 1, 2]) // table.column <+> '[3, 1, 2]'
innerProduct(table.column, [3, 1, 2]) // table.column <#> '[3, 1, 2]'
cosineDistance(table.column, [3, 1, 2]) // table.column <=> '[3, 1, 2]'
hammingDistance(table.column, '101') // table.column <~> '101'
jaccardDistance(table.column, '101') // table.column <%> '101'
```
If `pg_vector` has some other functions to use, you can replicate the implementation from an existing one we have. Here is how it can be done
```ts
export function l2Distance(
column: SQLWrapper | AnyColumn,
value: number[] | string[] | TypedQueryBuilder | string,
): SQL {
if (is(value, TypedQueryBuilder) || typeof value === 'string') {
return sql`${column} <-> ${value}`;
}
return sql`${column} <-> ${JSON.stringify(value)}`;
}
```
Name it as you wish and change the operator. This example allows for a numbers array, strings array, string, or even a select query. Feel free to create any other type you want or even contribute and submit a PR
#### Examples
Let's take a few examples of `pg_vector` queries from the `pg_vector` docs and translate them to Drizzle
```ts
import { l2Distance } from 'drizzle-orm';
// SELECT * FROM items ORDER BY embedding <-> '[3,1,2]' LIMIT 5;
db.select().from(items).orderBy(l2Distance(items.embedding, [3,1,2]))
// SELECT embedding <-> '[3,1,2]' AS distance FROM items;
db.select({ distance: l2Distance(items.embedding, [3,1,2]) }).from(items)
// SELECT * FROM items ORDER BY embedding <-> (SELECT embedding FROM items WHERE id = 1) LIMIT 5;
const subquery = db.select({ embedding: items.embedding }).from(items).where(eq(items.id, 1));
db.select().from(items).orderBy(l2Distance(items.embedding, subquery)).limit(5)
// SELECT (embedding <#> '[3,1,2]') * -1 AS inner_product FROM items;
db.select({ innerProduct: sql`(${innerProduct(items.embedding, [3,1,2])}) * -1` }).from(items)
// and more!
```
## 🎉 New PostgreSQL types: `point`, `line`
You can now use `point` and `line` from [PostgreSQL Geometric Types](https://www.postgresql.org/docs/current/datatype-geometric.html)
Type `point` has 2 modes for mappings from the database: `tuple` and `xy`.
- `tuple` will be accepted for insert and mapped on select to a tuple. So, the database Point(1,2) will be typed as [1,2] with drizzle.
- `xy` will be accepted for insert and mapped on select to an object with x, y coordinates. So, the database Point(1,2) will be typed as `{ x: 1, y: 2 }` with drizzle
```ts
const items = pgTable('items', {
point: point('point'),
pointObj: point('point_xy', { mode: 'xy' }),
});
```
Type `line` has 2 modes for mappings from the database: `tuple` and `abc`.
- `tuple` will be accepted for insert and mapped on select to a tuple. So, the database Line{1,2,3} will be typed as [1,2,3] with drizzle.
- `abc` will be accepted for insert and mapped on select to an object with a, b, and c constants from the equation `Ax + By + C = 0`. So, the database Line{1,2,3} will be typed as `{ a: 1, b: 2, c: 3 }` with drizzle.
```ts
const items = pgTable('items', {
line: line('line'),
lineObj: line('line_abc', { mode: 'abc' }),
});
```
## 🎉 Basic "postgis" extension support
> There is no specific code to create an extension inside the Drizzle schema. We assume that if you are using postgis types, indexes, and queries, you have a PostgreSQL database with the `postgis` extension installed.
`geometry` type from postgis extension:
```ts
const items = pgTable('items', {
geo: geometry('geo', { type: 'point' }),
geoObj: geometry('geo_obj', { type: 'point', mode: 'xy' }),
geoSrid: geometry('geo_options', { type: 'point', mode: 'xy', srid: 4000 }),
});
```
**mode**
Type `geometry` has 2 modes for mappings from the database: `tuple` and `xy`.
- `tuple` will be accepted for insert and mapped on select to a tuple. So, the database geometry will be typed as [1,2] with drizzle.
- `xy` will be accepted for insert and mapped on select to an object with x, y coordinates. So, the database geometry will be typed as `{ x: 1, y: 2 }` with drizzle
**type**
The current release has a predefined type: `point`, which is the `geometry(Point)` type in the PostgreSQL PostGIS extension. You can specify any string there if you want to use some other type
# Drizzle Kit updates: `drizzle-kit@0.22.0`
> Release notes here are partially duplicated from [drizzle-kit@0.22.0]()
## New Features
### 🎉 Support for new types
Drizzle Kit can now handle:
- `point` and `line` from PostgreSQL
- `vector` from the PostgreSQL `pg_vector` extension
- `geometry` from the PostgreSQL `PostGIS` extension
### 🎉 New param in drizzle.config - `extensionsFilters`
The PostGIS extension creates a few internal tables in the `public` schema. This means that if you have a database with the PostGIS extension and use `push` or `introspect`, all those tables will be included in `diff` operations. In this case, you would need to specify `tablesFilter`, find all tables created by the extension, and list them in this parameter.
We have addressed this issue so that you won't need to take all these steps. Simply specify `extensionsFilters` with the name of the extension used, and Drizzle will skip all the necessary tables.
Currently, we only support the `postgis` option, but we plan to add more extensions if they create tables in the `public` schema.
The `postgis` option will skip the `geography_columns`, `geometry_columns`, and `spatial_ref_sys` tables
```ts
import { defineConfig } from 'drizzle-kit'
export default defineConfig({
dialect: "postgresql",
extensionsFilters: ["postgis"],
})
```
## Improvements
### Update zod schemas for database credentials and write tests to all the positive/negative cases
- support full set of SSL params in kit config, provide types from node:tls connection
```ts
import { defineConfig } from 'drizzle-kit'
export default defineConfig({
dialect: "postgresql",
dbCredentials: {
ssl: true, //"require" | "allow" | "prefer" | "verify-full" | options from node:tls
}
})
```
```ts
import { defineConfig } from 'drizzle-kit'
export default defineConfig({
dialect: "mysql",
dbCredentials: {
ssl: "", // string | SslOptions (ssl options from mysql2 package)
}
})
```
### Normalized SQLite URLs for `libsql` and `better-sqlite3` drivers
Those drivers have different file path patterns, and Drizzle Kit will accept both and create a proper file path format for each
### Updated MySQL and SQLite index-as-expression behavior
In this release, MySQL and SQLite will properly map expressions into the SQL query. Expressions won't be escaped as strings, but columns will be
```ts
export const users = sqliteTable(
'users',
{
id: integer('id').primaryKey(),
email: text('email').notNull(),
},
(table) => ({
emailUniqueIndex: uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`),
}),
);
```
```sql
-- before
CREATE UNIQUE INDEX `emailUniqueIndex` ON `users` (`lower("users"."email")`);
-- now
CREATE UNIQUE INDEX `emailUniqueIndex` ON `users` (lower("email"));
```
## Bug Fixes
- [BUG]: multiple constraints not added (only the first one is generated) - [#2341](https://github.com/drizzle-team/drizzle-orm/issues/2341)
- Drizzle Studio: Error: Connection terminated unexpectedly - [#435](https://github.com/drizzle-team/drizzle-kit-mirror/issues/435)
- Unable to run sqlite migrations local - [#432](https://github.com/drizzle-team/drizzle-kit-mirror/issues/432)
- error: unknown option '--config' - [#423](https://github.com/drizzle-team/drizzle-kit-mirror/issues/423)
## How `push` and `generate` works for indexes
### Limitations
#### You should specify a name for your index manually if you have an index on at least one expression
Example
```ts
index().on(table.id, table.email) // will work well and the name will be autogenerated
index('my_name').on(table.id, table.email) // will work well
// but
index().on(sql`lower(${table.email})`) // error
index('my_name').on(sql`lower(${table.email})`) // will work well
```
#### Push won't generate statements if these fields(list below) were changed in an existing index:
- expressions inside `.on()` and `.using()`
- `.where()` statements
- operator classes `.op()` on columns
If you are using `push` workflows and want to change these fields in the index, you would need to:
- Comment out the index
- Push
- Uncomment the index and change those fields
- Push again
For the `generate` command, `drizzle-kit` will be triggered by any changes in the index for any property in the new drizzle indexes API, so there are no limitations here.
================================================
FILE: changelogs/drizzle-orm/0.31.1.md
================================================
# New Features
## Live Queries 🎉
As of `v0.31.1` Drizzle ORM now has native support for Expo SQLite Live Queries!
We've implemented a native `useLiveQuery` React Hook which observes necessary database changes and automatically re-runs database queries. It works with both SQL-like and Drizzle Queries:
```tsx
import { useLiveQuery, drizzle } from 'drizzle-orm/expo-sqlite';
import { openDatabaseSync } from 'expo-sqlite/next';
import { users } from './schema';
import { Text } from 'react-native';
const expo = openDatabaseSync('db.db');
const db = drizzle(expo);
const App = () => {
// Re-renders automatically when data changes
const { data } = useLiveQuery(db.select().from(users));
// const { data, error, updatedAt } = useLiveQuery(db.query.users.findFirst());
// const { data, error, updatedAt } = useLiveQuery(db.query.users.findMany());
return <Text>{JSON.stringify(data)}</Text>;
};
export default App;
```
We've intentionally not changed the API of ORM itself to stay with conventional React Hook API, so we have `useLiveQuery(databaseQuery)` as opposed to `db.select().from(users).useLive()` or `db.query.users.useFindMany()`
We've also decided to provide `data`, `error` and `updatedAt` fields as a result of hook for concise explicit error handling following practices of `React Query` and `Electric SQL`
================================================
FILE: changelogs/drizzle-orm/0.31.2.md
================================================
- 🎉 Added support for TiDB Cloud Serverless driver:
```ts
import { connect } from '@tidbcloud/serverless';
import { drizzle } from 'drizzle-orm/tidb-serverless';
const client = connect({ url: '...' });
const db = drizzle(client);
await db.select().from(...);
```
================================================
FILE: changelogs/drizzle-orm/0.31.3.md
================================================
### Bug fixed
- 🛠️ Fixed RQB behavior for tables with same names in different schemas
- 🛠️ Fixed [BUG]: Mismatched type hints when using RDS Data API - #2097
### New Prisma-Drizzle extension
```ts
import { PrismaClient } from '@prisma/client';
import { drizzle } from 'drizzle-orm/prisma/pg';
import { User } from './drizzle';
const prisma = new PrismaClient().$extends(drizzle());
const users = await prisma.$drizzle.select().from(User);
```
For more info, check docs: https://orm.drizzle.team/docs/prisma
================================================
FILE: changelogs/drizzle-orm/0.31.4.md
================================================
- Mark prisma clients package as optional - thanks @Cherry
================================================
FILE: changelogs/drizzle-orm/0.32.0-beta.md
================================================
# Preview release for `drizzle-orm@0.32.0` and `drizzle-kit@0.23.0`
> It's not mandatory to upgrade both packages, but if you want to use the new features in both queries and migrations, you will need to upgrade both packages
## New Features
### 🎉 PostgreSQL Sequences
You can now specify sequences in Postgres within any schema you need and define all the available properties
##### **Example**
```ts
import { pgSchema, pgSequence } from "drizzle-orm/pg-core";
// No params specified
export const customSequence = pgSequence("name");
// Sequence with params
export const customSequence = pgSequence("name", {
startWith: 100,
maxValue: 10000,
minValue: 100,
cycle: true,
cache: 10,
increment: 2
});
// Sequence in custom schema
export const customSchema = pgSchema('custom_schema');
export const customSequence = customSchema.sequence("name");
```
### 🎉 PostgreSQL Identity Columns
[Source](https://wiki.postgresql.org/wiki/Don%27t_Do_This#Don.27t_use_serial): As mentioned, the `serial` type in Postgres is outdated and should be deprecated. Ideally, you should not use it. `Identity columns` are the recommended way to specify sequences in your schema, which is why we are introducing the `identity columns` feature
##### **Example**
```ts
import { pgTable, integer, text } from 'drizzle-orm/pg-core'
export const ingredients = pgTable("ingredients", {
id: integer("id").primaryKey().generatedAlwaysAsIdentity({ startWith: 1000 }),
name: text("name").notNull(),
description: text("description"),
});
```
You can specify all properties available for sequences in the `.generatedAlwaysAsIdentity()` function. Additionally, you can specify custom names for these sequences
PostgreSQL docs [reference](https://www.postgresql.org/docs/current/sql-createtable.html#SQL-CREATETABLE-PARMS-GENERATED-IDENTITY).
### 🎉 PostgreSQL Generated Columns
You can now specify generated columns on any column supported by PostgreSQL to use with generated columns
##### **Example** with generated column for `tsvector`
> Note: we will add `tsVector` column type before latest release
```ts
import { SQL, sql } from "drizzle-orm";
import { customType, index, integer, pgTable, text } from "drizzle-orm/pg-core";
const tsVector = customType<{ data: string }>({
dataType() {
return "tsvector";
},
});
export const test = pgTable(
"test",
{
id: integer("id").primaryKey().generatedAlwaysAsIdentity(),
content: text("content"),
contentSearch: tsVector("content_search", {
dimensions: 3,
}).generatedAlwaysAs(
(): SQL => sql`to_tsvector('english', ${test.content})`
),
},
(t) => ({
idx: index("idx_content_search").using("gin", t.contentSearch),
})
);
```
In case you don't need to reference any columns from your table, you can use just `sql` template or a `string`
```ts
export const users = pgTable("users", {
id: integer("id"),
name: text("name"),
generatedName: text("gen_name").generatedAlwaysAs(sql`hello world!`),
generatedName1: text("gen_name1").generatedAlwaysAs("hello world!"),
}),
```
### 🎉 MySQL Generated Columns
You can now specify generated columns on any column supported by MySQL to use with generated columns
You can specify both `stored` and `virtual` options, for more info you can check [MySQL docs](https://dev.mysql.com/doc/refman/8.4/en/create-table-generated-columns.html)
Also MySQL has a few limitations for such column usage, which are described [here](https://dev.mysql.com/doc/refman/8.4/en/alter-table-generated-columns.html)
Drizzle Kit will also have limitations for `push` command:
1. You can't change the generated constraint expression and type using `push`. Drizzle-kit will ignore this change. To make it work, you would need to `drop the column`, `push`, and then `add a column with a new expression`. This was done due to the complex mapping from the database side, where the schema expression will be modified on the database side and, on introspection, we will get a different string. We can't be sure if you changed this expression or if it was changed and formatted by the database. As long as these are generated columns and `push` is mostly used for prototyping on a local database, it should be fast to `drop` and `create` generated columns. Since these columns are `generated`, all the data will be restored
2. `generate` should have no limitations
##### **Example**
```ts
export const users = mysqlTable("users", {
id: int("id"),
id2: int("id2"),
name: text("name"),
generatedName: text("gen_name").generatedAlwaysAs(
(): SQL => sql`${schema2.users.name} || 'hello'`,
{ mode: "stored" }
),
generatedName1: text("gen_name1").generatedAlwaysAs(
(): SQL => sql`${schema2.users.name} || 'hello'`,
{ mode: "virtual" }
),
}),
```
In case you don't need to reference any columns from your table, you can use just `sql` template or a `string` in `.generatedAlwaysAs()`
### 🎉 SQLite Generated Columns
You can now specify generated columns on any column supported by SQLite to use with generated columns
You can specify both `stored` and `virtual` options, for more info you can check [SQLite docs](https://www.sqlite.org/gencol.html)
Also SQLite has a few limitations for such column usage, which are described [here](https://www.sqlite.org/gencol.html)
Drizzle Kit will also have limitations for `push` and `generate` command:
1. You can't change the generated constraint expression with the stored type in an existing table. You would need to delete this table and create it again. This is due to SQLite limitations for such actions. We will handle this case in future releases (it will involve the creation of a new table with data migration).
2. You can't add a `stored` generated expression to an existing column for the same reason as above. However, you can add a `virtual` expression to an existing column.
3. You can't change a `stored` generated expression in an existing column for the same reason as above. However, you can change a `virtual` expression.
4. You can't change the generated constraint type from `virtual` to `stored` for the same reason as above. However, you can change from `stored` to `virtual`.
## New Drizzle Kit features
### 🎉 Migrations support for all the new orm features
PostgreSQL sequences, identity columns and generated columns for all dialects
### 🎉 New flag `--force` for `drizzle-kit push`
You can auto-accept all data-loss statements using the push command. It's only available in CLI parameters. Make sure you always use it if you are fine with running data-loss statements on your database
### 🎉 New `migrations` flag `prefix`
You can now customize migration file prefixes to make the format suitable for your migration tools:
- `index` is the default type and will result in `0001_name.sql` file names;
- `supabase` and `timestamp` are equal and will result in `20240627123900_name.sql` file names;
- `unix` will result in unix seconds prefixes `1719481298_name.sql` file names;
- `none` will omit the prefix completely;
##### **Example**: Supabase migrations format
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "postgresql",
migrations: {
prefix: 'supabase'
}
});
```
================================================
FILE: changelogs/drizzle-orm/0.32.0.md
================================================
# Release notes for `drizzle-orm@0.32.0` and `drizzle-kit@0.23.0`
> It's not mandatory to upgrade both packages, but if you want to use the new features in both queries and migrations, you will need to upgrade both packages
## New Features
### 🎉 MySQL `$returningId()` function
MySQL itself doesn't have native support for `RETURNING` after using `INSERT`. There is only one way to do it for `primary keys` with `autoincrement` (or `serial`) types, where you can access `insertId` and `affectedRows` fields. We've prepared an automatic way for you to handle such cases with Drizzle and automatically receive all inserted IDs as separate objects
```ts
import { boolean, int, text, mysqlTable } from 'drizzle-orm/mysql-core';
const usersTable = mysqlTable('users', {
id: int('id').primaryKey(),
name: text('name').notNull(),
verified: boolean('verified').notNull().default(false),
});
const result = await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).$returningId();
// ^? { id: number }[]
```
Also with Drizzle, you can specify a `primary key` with `$default` function that will generate custom primary keys at runtime. We will also return those generated keys for you in the `$returningId()` call
```ts
import { varchar, text, mysqlTable } from 'drizzle-orm/mysql-core';
import { createId } from '@paralleldrive/cuid2';
const usersTableDefFn = mysqlTable('users_default_fn', {
customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(createId),
name: text('name').notNull(),
});
const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]).$returningId();
// ^? { customId: string }[]
```
> If there are no primary keys, the type will be `{}[]` for such queries
### 🎉 PostgreSQL Sequences
You can now specify sequences in Postgres within any schema you need and define all the available properties
##### **Example**
```ts
import { pgSchema, pgSequence } from "drizzle-orm/pg-core";
// No params specified
export const customSequence = pgSequence("name");
// Sequence with params
export const customSequence = pgSequence("name", {
startWith: 100,
maxValue: 10000,
minValue: 100,
cycle: true,
cache: 10,
increment: 2
});
// Sequence in custom schema
export const customSchema = pgSchema('custom_schema');
export const customSequence = customSchema.sequence("name");
```
### 🎉 PostgreSQL Identity Columns
[Source](https://wiki.postgresql.org/wiki/Don%27t_Do_This#Don.27t_use_serial): As mentioned, the `serial` type in Postgres is outdated and should be deprecated. Ideally, you should not use it. `Identity columns` are the recommended way to specify sequences in your schema, which is why we are introducing the `identity columns` feature
##### **Example**
```ts
import { pgTable, integer, text } from 'drizzle-orm/pg-core'
export const ingredients = pgTable("ingredients", {
id: integer("id").primaryKey().generatedAlwaysAsIdentity({ startWith: 1000 }),
name: text("name").notNull(),
description: text("description"),
});
```
You can specify all properties available for sequences in the `.generatedAlwaysAsIdentity()` function. Additionally, you can specify custom names for these sequences
PostgreSQL docs [reference](https://www.postgresql.org/docs/current/sql-createtable.html#SQL-CREATETABLE-PARMS-GENERATED-IDENTITY).
### 🎉 PostgreSQL Generated Columns
You can now specify generated columns on any column supported by PostgreSQL to use with generated columns
##### **Example** with generated column for `tsvector`
> Note: we will add `tsVector` column type before latest release
```ts
import { SQL, sql } from "drizzle-orm";
import { customType, index, integer, pgTable, text } from "drizzle-orm/pg-core";
const tsVector = customType<{ data: string }>({
dataType() {
return "tsvector";
},
});
export const test = pgTable(
"test",
{
id: integer("id").primaryKey().generatedAlwaysAsIdentity(),
content: text("content"),
contentSearch: tsVector("content_search", {
dimensions: 3,
}).generatedAlwaysAs(
(): SQL => sql`to_tsvector('english', ${test.content})`
),
},
(t) => ({
idx: index("idx_content_search").using("gin", t.contentSearch),
})
);
```
In case you don't need to reference any columns from your table, you can use just `sql` template or a `string`
```ts
export const users = pgTable("users", {
id: integer("id"),
name: text("name"),
generatedName: text("gen_name").generatedAlwaysAs(sql`hello world!`),
generatedName1: text("gen_name1").generatedAlwaysAs("hello world!"),
}),
```
### 🎉 MySQL Generated Columns
You can now specify generated columns on any column supported by MySQL to use with generated columns
You can specify both `stored` and `virtual` options, for more info you can check [MySQL docs](https://dev.mysql.com/doc/refman/8.4/en/create-table-generated-columns.html)
Also MySQL has a few limitations for such column usage, which are described [here](https://dev.mysql.com/doc/refman/8.4/en/alter-table-generated-columns.html)
Drizzle Kit will also have limitations for `push` command:
1. You can't change the generated constraint expression and type using `push`. Drizzle-kit will ignore this change. To make it work, you would need to `drop the column`, `push`, and then `add a column with a new expression`. This was done due to the complex mapping from the database side, where the schema expression will be modified on the database side and, on introspection, we will get a different string. We can't be sure if you changed this expression or if it was changed and formatted by the database. As long as these are generated columns and `push` is mostly used for prototyping on a local database, it should be fast to `drop` and `create` generated columns. Since these columns are `generated`, all the data will be restored
2. `generate` should have no limitations
##### **Example**
```ts
export const users = mysqlTable("users", {
id: int("id"),
id2: int("id2"),
name: text("name"),
generatedName: text("gen_name").generatedAlwaysAs(
(): SQL => sql`${schema2.users.name} || 'hello'`,
{ mode: "stored" }
),
generatedName1: text("gen_name1").generatedAlwaysAs(
(): SQL => sql`${schema2.users.name} || 'hello'`,
{ mode: "virtual" }
),
}),
```
In case you don't need to reference any columns from your table, you can use just `sql` template or a `string` in `.generatedAlwaysAs()`
### 🎉 SQLite Generated Columns
You can now specify generated columns on any column supported by SQLite to use with generated columns
You can specify both `stored` and `virtual` options, for more info you can check [SQLite docs](https://www.sqlite.org/gencol.html)
Also SQLite has a few limitations for such column usage, which are described [here](https://www.sqlite.org/gencol.html)
Drizzle Kit will also have limitations for `push` and `generate` command:
1. You can't change the generated constraint expression with the stored type in an existing table. You would need to delete this table and create it again. This is due to SQLite limitations for such actions. We will handle this case in future releases (it will involve the creation of a new table with data migration).
2. You can't add a `stored` generated expression to an existing column for the same reason as above. However, you can add a `virtual` expression to an existing column.
3. You can't change a `stored` generated expression in an existing column for the same reason as above. However, you can change a `virtual` expression.
4. You can't change the generated constraint type from `virtual` to `stored` for the same reason as above. However, you can change from `stored` to `virtual`.
## New Drizzle Kit features
### 🎉 Migrations support for all the new orm features
PostgreSQL sequences, identity columns and generated columns for all dialects
### 🎉 New flag `--force` for `drizzle-kit push`
You can auto-accept all data-loss statements using the push command. It's only available in CLI parameters. Make sure you always use it if you are fine with running data-loss statements on your database
### 🎉 New `migrations` flag `prefix`
You can now customize migration file prefixes to make the format suitable for your migration tools:
- `index` is the default type and will result in `0001_name.sql` file names;
- `supabase` and `timestamp` are equal and will result in `20240627123900_name.sql` file names;
- `unix` will result in unix seconds prefixes `1719481298_name.sql` file names;
- `none` will omit the prefix completely;
##### **Example**: Supabase migrations format
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "postgresql",
migrations: {
prefix: 'supabase'
}
});
```
================================================
FILE: changelogs/drizzle-orm/0.32.1.md
================================================
- Fix typings for indexes and allow creating indexes on 3+ columns mixing columns and expressions - thanks @lbguilherme!
- Added support for "limit 0" in all dialects - closes [#2011](https://github.com/drizzle-team/drizzle-orm/issues/2011) - thanks @sillvva!
- Make inArray and notInArray accept empty list, closes [#1295](https://github.com/drizzle-team/drizzle-orm/issues/1295) - thanks @RemiPeruto!
- fix typo in lt typedoc - thanks @dalechyn!
- fix wrong example in README.md - thanks @7flash!
================================================
FILE: changelogs/drizzle-orm/0.32.2.md
================================================
- Fix AWS Data API type hints bugs in RQB
- Fix set transactions in MySQL bug - thanks @roguesherlock
- Add forwarding dependencies within useLiveQuery, fixes [#2651](https://github.com/drizzle-team/drizzle-orm/issues/2651) - thanks @anstapol
- Export additional types from SQLite package, like `AnySQLiteUpdate` - thanks @veloii
================================================
FILE: changelogs/drizzle-orm/0.33.0.md
================================================
## Breaking changes (for some of postgres.js users)
#### Bugs fixed for this breaking change
- [[BUG]: jsonb always inserted as a json string when using postgres-js](https://github.com/drizzle-team/drizzle-orm/issues/724)
- [[BUG]: jsonb type on postgres implement incorrectly](https://github.com/drizzle-team/drizzle-orm/issues/1511)
> As we are doing with other drivers, we've changed the behavior of PostgreSQL-JS to pass raw JSON values, the same as you see them in the database. So if you are using the PostgreSQL-JS driver and passing data to Drizzle elsewhere, please check the new behavior of the client after it is passed to Drizzle.
> We will update it to ensure it does not override driver behaviors, but this will be done as a complex task for everything in Drizzle in other releases
If you were using `postgres-js` with `jsonb` fields, you might have seen stringified objects in your database, while drizzle insert and select operations were working as expected.
You need to convert those fields from strings to actual JSON objects. To do this, you can use the following query to update your database:
**if you are using jsonb:**
```sql
update table_name
set jsonb_column = (jsonb_column #>> '{}')::jsonb;
```
**if you are using json:**
```sql
update table_name
set json_column = (json_column #>> '{}')::json;
```
We've tested it in several cases, and it worked well, but only if all stringified objects are arrays or objects. If you have primitives like strings, numbers, booleans, etc., you can use this query to update all the fields
**if you are using jsonb:**
```sql
UPDATE table_name
SET jsonb_column = CASE
-- Convert to JSONB if it is a valid JSON object or array
WHEN jsonb_column #>> '{}' LIKE '{%' OR jsonb_column #>> '{}' LIKE '[%' THEN
(jsonb_column #>> '{}')::jsonb
ELSE
jsonb_column
END
WHERE
jsonb_column IS NOT NULL;
```
**if you are using json:**
```sql
UPDATE table_name
SET json_column = CASE
-- Convert to JSON if it is a valid JSON object or array
WHEN json_column #>> '{}' LIKE '{%' OR json_column #>> '{}' LIKE '[%' THEN
(json_column #>> '{}')::json
ELSE
json_column
END
WHERE json_column IS NOT NULL;
```
If nothing works for you and you are blocked, please reach out to me @AndriiSherman. I will try to help you!
## Bug Fixes
- [[BUG]: boolean mode not working with prepared statements (bettersqlite)](https://github.com/drizzle-team/drizzle-orm/issues/2568) - thanks @veloii
- [[BUG]: isTable helper function is not working](https://github.com/drizzle-team/drizzle-orm/issues/2672) - thanks @hajek-raven
- [[BUG]: Documentation is outdated on inArray and notInArray Methods](https://github.com/drizzle-team/drizzle-orm/issues/2690) - thanks @RemiPeruto
================================================
FILE: changelogs/drizzle-orm/0.34.0.md
================================================
## Breaking changes and migrate guide for Turso users
If you are using Turso and libsql, you will need to upgrade your `drizzle.config` and `@libsql/client` package.
1. This version of drizzle-orm will only work with `@libsql/client@0.10.0` or higher if you are using the `migrate` function. For other use cases, you can continue using previous versions (but we suggest upgrading)
To install the latest version, use the command:
```bash
npm i @libsql/client@latest
```
2. Previously, we had a common `drizzle.config` for SQLite and Turso users, which allowed a shared strategy for both dialects. Starting with this release, we are introducing the turso dialect in drizzle-kit. We will evolve and improve Turso as a separate dialect with its own migration strategies.
**Before**
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "sqlite",
schema: "./schema.ts",
out: "./drizzle",
dbCredentials: {
url: "database.db",
},
breakpoints: true,
verbose: true,
strict: true,
});
```
**After**
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "turso",
schema: "./schema.ts",
out: "./drizzle",
dbCredentials: {
url: "database.db",
},
breakpoints: true,
verbose: true,
strict: true,
});
```
If you are using only SQLite, you can use `dialect: "sqlite"`
## LibSQL/Turso and Sqlite migration updates
### SQLite "generate" and "push" statements updates
Starting from this release, we will no longer generate comments like this:
```sql
'/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually'
+ '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
+ '\n https://www.sqlite.org/lang_altertable.html'
+ '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
+ "\n\n Due to that we don't generate migration automatically and it has to be done manually"
+ '\n*/'
```
We will generate a set of statements, and you can decide if it's appropriate to create data-moving statements instead. Here is an example of the SQL file you'll receive now:
```sql
PRAGMA foreign_keys=OFF;
--> statement-breakpoint
CREATE TABLE `__new_worker` (
`id` integer PRIMARY KEY NOT NULL,
`name` text NOT NULL,
`salary` text NOT NULL,
`job_id` integer,
FOREIGN KEY (`job_id`) REFERENCES `job`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
INSERT INTO `__new_worker`("id", "name", "salary", "job_id") SELECT "id", "name", "salary", "job_id" FROM `worker`;
--> statement-breakpoint
DROP TABLE `worker`;
--> statement-breakpoint
ALTER TABLE `__new_worker` RENAME TO `worker`;
--> statement-breakpoint
PRAGMA foreign_keys=ON;
```
### LibSQL/Turso "generate" and "push" statements updates
Since LibSQL supports more ALTER statements than SQLite, we can generate more statements without recreating your schema and moving all the data, which can be potentially dangerous for production environments.
LibSQL and Turso will now have a separate dialect in the Drizzle config file, meaning that we will evolve Turso and LibSQL independently from SQLite and will aim to support as many features as Turso/LibSQL offer.
With the updated LibSQL migration strategy, you will have the ability to:
- **Change Data Type**: Set a new data type for existing columns.
- **Set and Drop Default Values**: Add or remove default values for existing columns.
- **Set and Drop NOT NULL**: Add or remove the NOT NULL constraint on existing columns.
- **Add References to Existing Columns**: Add foreign key references to existing columns
You can find more information in the [LibSQL documentation](https://github.com/tursodatabase/libsql/blob/main/libsql-sqlite3/doc/libsql_extensions.md#altering-columns)
### LIMITATIONS
- Dropping or altering an index will cause table recreation.
This is because LibSQL/Turso does not support dropping this type of index.
```sql
CREATE TABLE `users` (
`id` integer NOT NULL,
`name` integer,
`age` integer PRIMARY KEY NOT NULL
FOREIGN KEY (`name`) REFERENCES `users1`("id") ON UPDATE no action ON DELETE no action
);
```
- If the table has indexes, altering columns will cause table recreation.
- Drizzle-Kit will drop the indexes, modify the columns, and then recreate the indexes.
- Adding or dropping composite foreign keys is not supported and will cause table recreation
### NOTES
- You can create a reference on any column type, but if you want to insert values, the referenced column must have a unique index or primary key.
```sql
CREATE TABLE parent(a PRIMARY KEY, b UNIQUE, c, d, e, f);
CREATE UNIQUE INDEX i1 ON parent(c, d);
CREATE INDEX i2 ON parent(e);
CREATE UNIQUE INDEX i3 ON parent(f COLLATE nocase);
CREATE TABLE child1(f, g REFERENCES parent(a)); -- Ok
CREATE TABLE child2(h, i REFERENCES parent(b)); -- Ok
CREATE TABLE child3(j, k, FOREIGN KEY(j, k) REFERENCES parent(c, d)); -- Ok
CREATE TABLE child4(l, m REFERENCES parent(e)); -- Error!
CREATE TABLE child5(n, o REFERENCES parent(f)); -- Error!
CREATE TABLE child6(p, q, FOREIGN KEY(p, q) REFERENCES parent(b, c)); -- Error!
CREATE TABLE child7(r REFERENCES parent(c)); -- Error!
```
> **NOTE**: The foreign key for the table child5 is an error because, although the parent key column has a unique index, the index uses a different collating sequence.
See more: https://www.sqlite.org/foreignkeys.html
## A new and easy way to start using drizzle
Currently, the only way to do this is to define the client yourself and pass it to drizzle
```ts
const client = new Pool({ url: '' });
drizzle(client, { logger: true });
```
But we want to introduce you to a new API, which is a simplified method in addition to the existing one.
Most clients will have a few options to connect, starting with the easiest and most common one, and allowing you to control your client connection as needed.
Let's use `node-postgres` as an example, but the same pattern can be applied to all other clients
```ts
// Finally, one import for all available clients and dialects!
import { drizzle } from 'drizzle-orm'
// Choose a client and use a connection URL — nothing else is needed!
const db1 = await drizzle("node-postgres", process.env.POSTGRES_URL);
// If you need to pass a logger, schema, or other configurations, you can use an object and specify the client-specific URL in the connection
const db2 = await drizzle("node-postgres", {
connection: process.env.POSTGRES_URL,
logger: true
});
// And finally, if you need to use full client/driver-specific types in connections, you can use a URL or host/port/etc. as an object inferred from the underlying client connection types
const db3 = await drizzle("node-postgres", {
connection: {
connectionString: process.env.POSTGRES_URL,
},
});
const db4 = await drizzle("node-postgres", {
connection: {
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
database: process.env.DB_NAME,
ssl: true,
},
});
```
A few clients will have a slightly different API due to their specific behavior. Let's take a look at them:
For `aws-data-api-pg`, Drizzle will require `resourceArn`, `database`, and `secretArn`, along with any other AWS Data API client types for the connection, such as credentials, region, etc.
```ts
drizzle("aws-data-api-pg", {
connection: {
resourceArn: "",
database: "",
secretArn: "",
},
});
```
For `d1`, the CloudFlare Worker types as described in the [documentation](https://developers.cloudflare.com/d1/get-started/) here will be required.
```ts
drizzle("d1", {
connection: env.DB // CloudFlare Worker Types
})
```
For `vercel-postgres`, nothing is needed since Vercel automatically retrieves the `POSTGRES_URL` from the `.env` file. You can check this [documentation](https://vercel.com/docs/storage/vercel-postgres/quickstart) for more info
```ts
drizzle("vercel-postgres")
```
> Note that the first example with the client is still available and not deprecated. You can use it if you don't want to await the drizzle object. The new way of defining drizzle is designed to make it easier to import from one place and get autocomplete for all the available clients
## Optional names for columns and callback in drizzle table
We believe that schema definition in Drizzle is extremely powerful and aims to be as close to SQL as possible while adding more helper functions for JS runtime values.
However, there are a few areas that could be improved, which we addressed in this release. These include:
- Unnecessary database column names when TypeScript keys are essentially just copies of them
- A callback that provides all column types available for a specific table.
Let's look at an example with PostgreSQL (this applies to all the dialects supported by Drizzle)
**Previously**
```ts
import { boolean, pgTable, text, uuid } from "drizzle-orm/pg-core";
export const ingredients = pgTable("ingredients", {
id: uuid("id").defaultRandom().primaryKey(),
name: text("name").notNull(),
description: text("description"),
inStock: boolean("in_stock").default(true),
});
```
The previous table definition will still be valid in the new release, but it can be replaced with this instead
```ts
import { pgTable } from "drizzle-orm/pg-core";
export const ingredients = pgTable("ingredients", (t) => ({
id: t.uuid().defaultRandom().primaryKey(),
name: t.text().notNull(),
description: t.text(),
inStock: t.boolean("in_stock").default(true),
}));
```
## New `casing` param in `drizzle-orm` and `drizzle-kit`
There are more improvements you can make to your schema definition. The most common way to name your variables in a database and in TypeScript code is usually `snake_case` in the database and `camelCase` in the code. For this case, in Drizzle, you can now define a naming strategy in your database to help Drizzle map column keys automatically. Let's take a table from the previous example and make it work with the new casing API in Drizzle
Table can now become:
```ts
import { pgTable } from "drizzle-orm/pg-core";
export const ingredients = pgTable("ingredients", (t) => ({
id: t.uuid().defaultRandom().primaryKey(),
name: t.text().notNull(),
description: t.text(),
inStock: t.boolean().default(true),
}));
```
As you can see, `inStock` doesn't have a database name alias, but by defining the casing configuration at the connection level, all queries will automatically map it to `snake_case`
```ts
const db = await drizzle('node-postgres', { connection: '', casing: 'snake_case' })
```
For `drizzle-kit` migration generation you should also specify the `casing` param in your drizzle config, so you can be sure your casing strategy will be applied to drizzle-kit as well
```ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: "postgresql",
schema: "./schema.ts",
dbCredentials: {
url: "postgresql://postgres:password@localhost:5432/db",
},
casing: "snake_case",
});
```
## New "count" API
Before this release to count entities in a table, you would need to do this:
```ts
const res = await db.select({ count: sql`count(*)` }).from(users);
const count = res[0].count;
```
The new API will look like this:
```ts
// how many users are in the database
const count: number = await db.$count(users);
// how many users with the name "Dan" are in the database
const count: number = await db.$count(users, eq(name, "Dan"));
```
This can also work as a subquery and within relational queries
```ts
const users = await db.select({
...users,
postsCount: db.$count(posts, eq(posts.authorId, users.id))
});
const users = await db.query.users.findMany({
extras: {
postsCount: db.$count(posts, eq(posts.authorId, users.id))
}
})
```
## Ability to execute raw strings instead of using SQL templates for raw queries
Previously, you would have needed to do this to execute a raw query with Drizzle
```ts
import { sql } from 'drizzle-orm'
db.execute(sql`select * from ${users}`);
// or
db.execute(sql.raw(`select * from ${users}`));
```
You can now do this as well
```ts
db.execute('select * from users')
```
================================================
FILE: changelogs/drizzle-orm/0.34.1.md
================================================
- Fixed dynamic imports for CJS and MJS in the `/connect` module
================================================
FILE: changelogs/drizzle-orm/0.35.0.md
================================================
# Important change after 0.34.0 release
## Updated the init Drizzle database API
The API from version 0.34.0 turned out to be unusable and needs to be changed. You can read more about our decisions in [this discussion](https://github.com/drizzle-team/drizzle-orm/discussions/3097)
If you still want to use the new API introduced in 0.34.0, which can create driver clients for you under the hood, you can now do so
```ts
import { drizzle } from "drizzle-orm/node-postgres";
const db = drizzle(process.env.DATABASE_URL);
// or
const db = drizzle({
connection: process.env.DATABASE_URL
});
const db = drizzle({
connection: {
user: "...",
password: "...",
host: "...",
port: 4321,
db: "...",
},
});
// if you need to pass logger or schema
const db = drizzle({
connection: process.env.DATABASE_URL,
logger: true,
schema: schema,
});
```
in order to not introduce breaking change - we will still leave support for deprecated API until V1 release.
It will degrade autocomplete performance in connection params due to `DatabaseDriver` | `ConnectionParams` types collision,
but that's a decent compromise against breaking changes
```ts
import { drizzle } from "drizzle-orm/node-postgres";
import { Pool } from "pg";
const client = new Pool({ connectionString: process.env.DATABASE_URL });
const db = drizzle(client); // deprecated but available
// new version
const db = drizzle({
client: client,
});
```
# New Features
## New .orderBy() and .limit() functions in update and delete statements in SQLite and MySQL
You now have more options for the `update` and `delete` query builders in MySQL and SQLite
**Example**
```ts
await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name));
await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name));
```
## New `drizzle.mock()` function
There were cases where you didn't need to provide a driver to the Drizzle object, and this served as a workaround
```ts
const db = drizzle({} as any)
```
Now you can do this using a mock function
```ts
const db = drizzle.mock()
```
There is no valid production use case for this, but we used it in situations where we needed to check types, etc., without making actual database calls or dealing with driver creation. If anyone was using it, please switch to using mocks now
# Internal updates
- Upgraded TS in codebase to the version 5.6.3
# Bug fixes
- [[BUG]: New $count API error with @neondatabase/serverless](https://github.com/drizzle-team/drizzle-orm/issues/3081)
================================================
FILE: changelogs/drizzle-orm/0.35.1.md
================================================
- Updated internal versions for the drizzle-kit and drizzle-orm packages. Changes were introduced in the last minor release, and you are required to upgrade both packages to ensure they work as expected
================================================
FILE: changelogs/drizzle-orm/0.35.2.md
================================================
- Fix issues with importing in several environments after updating the Drizzle driver implementation
We've added approximately 240 tests to check the ESM and CJS builds for all the drivers we have. You can check them [here](https://github.com/drizzle-team/drizzle-orm/tree/main/integration-tests/js-tests/driver-init)
- Fixed [[BUG]: Type Error in PgTransaction Missing $client Property After Upgrading to drizzle-orm@0.35.1](https://github.com/drizzle-team/drizzle-orm/issues/3140)
- Fixed [[BUG]: New critical Build error drizzle 0.35.0 deploying on Cloudflare ](https://github.com/drizzle-team/drizzle-orm/issues/3137)
================================================
FILE: changelogs/drizzle-orm/0.35.3.md
================================================
# New LibSQL driver modules
Drizzle now has native support for all `@libsql/client` driver variations:
1. `@libsql/client` - defaults to node import, automatically changes to web if target or platform is set for bundler, e.g. `esbuild --platform=browser`
```ts
import { drizzle } from 'drizzle-orm/libsql';
const db = drizzle({ connection: {
url: process.env.DATABASE_URL,
authToken: process.env.DATABASE_AUTH_TOKEN
}});
```
2. `@libsql/client/node` node compatible module, supports :memory:, file, wss, http and turso connection protocols
```ts
import { drizzle } from 'drizzle-orm/libsql/node';
const db = drizzle({ connection: {
url: process.env.DATABASE_URL,
authToken: process.env.DATABASE_AUTH_TOKEN
}});
```
3. `@libsql/client/web` module for fullstack web frameworks like next, nuxt, astro, etc.
```ts
import { drizzle } from 'drizzle-orm/libsql/web';
const db = drizzle({ connection: {
url: process.env.DATABASE_URL,
authToken: process.env.DATABASE_AUTH_TOKEN
}});
```
4. `@libsql/client/http` module for http and https connection protocols
```ts
import { drizzle } from 'drizzle-orm/libsql/http';
const db = drizzle({ connection: {
url: process.env.DATABASE_URL,
authToken: process.env.DATABASE_AUTH_TOKEN
}});
```
5. `@libsql/client/ws` module for ws and wss connection protocols
```ts
import { drizzle } from 'drizzle-orm/libsql/ws';
const db = drizzle({ connection: {
url: process.env.DATABASE_URL,
authToken: process.env.DATABASE_AUTH_TOKEN
}});
```
6. `@libsql/client/sqlite3` module for :memory: and file connection protocols
```ts
import { drizzle } from 'drizzle-orm/libsql/sqlite3';
const db = drizzle({ connection: {
url: process.env.DATABASE_URL,
authToken: process.env.DATABASE_AUTH_TOKEN
}});
```
7. `@libsql/client-wasm` Separate experimental package for WASM
```ts
import { drizzle } from 'drizzle-orm/libsql/wasm';
const db = drizzle({ connection: {
url: process.env.DATABASE_URL,
authToken: process.env.DATABASE_AUTH_TOKEN
}});
```
================================================
FILE: changelogs/drizzle-orm/0.36.0.md
================================================
> This version of `drizzle-orm` requires `drizzle-kit@0.27.0` to enable all new features
# New Features
## Row-Level Security (RLS)
With Drizzle, you can enable Row-Level Security (RLS) for any Postgres table, create policies with various options, and define and manage the roles those policies apply to.
Drizzle supports a raw representation of Postgres policies and roles that can be used in any way you want. This works with popular Postgres database providers such as `Neon` and `Supabase`.
In Drizzle, we have specific predefined RLS roles and functions for RLS with both database providers, but you can also define your own logic.
### Enable RLS
If you just want to enable RLS on a table without adding policies, you can use `.enableRLS()`
As mentioned in the PostgreSQL documentation:
> If no policy exists for the table, a default-deny policy is used, meaning that no rows are visible or can be modified.
Operations that apply to the whole table, such as TRUNCATE and REFERENCES, are not subject to row security.
```ts
import { integer, pgTable } from 'drizzle-orm/pg-core';
export const users = pgTable('users', {
id: integer(),
}).enableRLS();
```
> If you add a policy to a table, RLS will be enabled automatically. So, there’s no need to explicitly enable RLS when adding policies to a table.
### Roles
Currently, Drizzle supports defining roles with a few different options, as shown below. Support for more options will be added in a future release.
```ts
import { pgRole } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin', { createRole: true, createDb: true, inherit: true });
```
If a role already exists in your database, and you don’t want drizzle-kit to ‘see’ it or include it in migrations, you can mark the role as existing.
```ts
import { pgRole } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin').existing();
```
### Policies
To fully leverage RLS, you can define policies within a Drizzle table.
> In PostgreSQL, policies should be linked to an existing table. Since policies are always associated with a specific table, we decided that policy definitions should be defined as a parameter of `pgTable`
**Example of pgPolicy with all available properties**
```ts
import { sql } from 'drizzle-orm';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy('policy', {
as: 'permissive',
to: admin,
for: 'delete',
using: sql``,
withCheck: sql``,
}),
]);
```
**Link Policy to an existing table**
There are situations where you need to link a policy to an existing table in your database.
The most common use case is with database providers like `Neon` or `Supabase`, where you need to add a policy
to their existing tables. In this case, you can use the `.link()` API
```ts
import { sql } from "drizzle-orm";
import { pgPolicy } from "drizzle-orm/pg-core";
import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase";
export const policy = pgPolicy("authenticated role insert policy", {
for: "insert",
to: authenticatedRole,
using: sql``,
}).link(realtimeMessages);
```
### Migrations
If you are using drizzle-kit to manage your schema and roles, there may be situations where you want to refer to roles that are not defined in your Drizzle schema. In such cases, you may want drizzle-kit to skip managing these roles without having to define each role in your drizzle schema and marking it with `.existing()`.
In these cases, you can use `entities.roles` in `drizzle.config.ts`. For a complete reference, refer to the [`drizzle.config.ts`](https://orm.drizzle.team/docs/drizzle-config-file) documentation.
By default, `drizzle-kit` does not manage roles for you, so you will need to enable this feature in `drizzle.config.ts`.
```ts {12-14}
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
dialect: 'postgresql',
schema: "./drizzle/schema.ts",
dbCredentials: {
url: process.env.DATABASE_URL!
},
verbose: true,
strict: true,
entities: {
roles: true
}
});
```
In case you need additional configuration options, let's take a look at a few more examples.
**You have an `admin` role and want to exclude it from the list of manageable roles**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
exclude: ['admin']
}
}
});
```
**You have an `admin` role and want to include it in the list of manageable roles**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
include: ['admin']
}
}
});
```
**If you are using `Neon` and want to exclude Neon-defined roles, you can use the provider option**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
provider: 'neon'
}
}
});
```
**If you are using `Supabase` and want to exclude Supabase-defined roles, you can use the provider option**
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
provider: 'supabase'
}
}
});
```
> You may encounter situations where Drizzle is slightly outdated compared to new roles specified by your database provider.
In such cases, you can use the `provider` option and `exclude` additional roles:
```ts
// drizzle.config.ts
import { defineConfig } from "drizzle-kit";
export default defineConfig({
...
entities: {
roles: {
provider: 'supabase',
exclude: ['new_supabase_role']
}
}
});
```
### RLS on views
With Drizzle, you can also specify RLS policies on views. For this, you need to use `security_invoker` in the view's WITH options. Here is a small example:
```ts {5}
...
export const roomsUsersProfiles = pgView("rooms_users_profiles")
.with({
securityInvoker: true,
})
.as((qb) =>
qb
.select({
...getTableColumns(roomsUsers),
email: profiles.email,
})
.from(roomsUsers)
.innerJoin(profiles, eq(roomsUsers.userId, profiles.id))
);
```
### Using with Neon
The Neon Team helped us implement their vision of a wrapper on top of our raw policies API. We defined a specific
`/neon` import with the `crudPolicy` function that includes predefined functions and Neon's default roles.
Here's an example of how to use the `crudPolicy` function:
```ts
import { crudPolicy } from 'drizzle-orm/neon';
import { integer, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
crudPolicy({ role: admin, read: true, modify: false }),
]);
```
This policy is equivalent to:
```ts
import { sql } from 'drizzle-orm';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy(`crud-${admin.name}-policy-insert`, {
for: 'insert',
to: admin,
withCheck: sql`false`,
}),
pgPolicy(`crud-${admin.name}-policy-update`, {
for: 'update',
to: admin,
using: sql`false`,
withCheck: sql`false`,
}),
pgPolicy(`crud-${admin.name}-policy-delete`, {
for: 'delete',
to: admin,
using: sql`false`,
}),
pgPolicy(`crud-${admin.name}-policy-select`, {
for: 'select',
to: admin,
using: sql`true`,
}),
]);
```
`Neon` exposes predefined `authenticated` and `anonymous` roles and related functions. If you are using `Neon` for RLS, you can use these roles, which are marked as existing, and the related functions in your RLS queries.
```ts
// drizzle-orm/neon
export const authenticatedRole = pgRole('authenticated').existing();
export const anonymousRole = pgRole('anonymous').existing();
export const authUid = (userIdColumn: AnyPgColumn) => sql`(select auth.user_id() = ${userIdColumn})`;
```
For example, you can use the `Neon` predefined roles and functions like this:
```ts
import { sql } from 'drizzle-orm';
import { authenticatedRole } from 'drizzle-orm/neon';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy(`policy-insert`, {
for: 'insert',
to: authenticatedRole,
withCheck: sql`false`,
}),
]);
```
### Using with Supabase
We also have a `/supabase` import with a set of predefined roles marked as existing, which you can use in your schema.
This import will be extended in a future release with more functions and helpers to make using RLS and `Supabase` simpler.
```ts
// drizzle-orm/supabase
export const anonRole = pgRole('anon').existing();
export const authenticatedRole = pgRole('authenticated').existing();
export const serviceRole = pgRole('service_role').existing();
export const postgresRole = pgRole('postgres_role').existing();
export const supabaseAuthAdminRole = pgRole('supabase_auth_admin').existing();
```
For example, you can use the `Supabase` predefined roles like this:
```ts
import { sql } from 'drizzle-orm';
import { serviceRole } from 'drizzle-orm/supabase';
import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core';
export const admin = pgRole('admin');
export const users = pgTable('users', {
id: integer(),
}, (t) => [
pgPolicy(`policy-insert`, {
for: 'insert',
to: serviceRole,
withCheck: sql`false`,
}),
]);
```
The `/supabase` import also includes predefined tables and functions that you can use in your application
```ts
// drizzle-orm/supabase
const auth = pgSchema('auth');
export const authUsers = auth.table('users', {
id: uuid().primaryKey().notNull(),
});
const realtime = pgSchema('realtime');
export const realtimeMessages = realtime.table(
'messages',
{
id: bigserial({ mode: 'bigint' }).primaryKey(),
topic: text().notNull(),
extension: text({
enum: ['presence', 'broadcast', 'postgres_changes'],
}).notNull(),
},
);
export const authUid = sql`(select auth.uid())`;
export const realtimeTopic = sql`realtime.topic()`;
```
This allows you to use it in your code, and Drizzle Kit will treat them as existing databases,
using them only as information to connect to other entities
```ts
import { foreignKey, pgPolicy, pgTable, text, uuid } from "drizzle-orm/pg-core";
import { sql } from "drizzle-orm/sql";
import { authenticatedRole, authUsers } from "drizzle-orm/supabase";
export const profiles = pgTable(
"profiles",
{
id: uuid().primaryKey().notNull(),
email: text().notNull(),
},
(table) => [
foreignKey({
columns: [table.id],
// reference to the auth table from Supabase
foreignColumns: [authUsers.id],
name: "profiles_id_fk",
}).onDelete("cascade"),
pgPolicy("authenticated can view all profiles", {
for: "select",
// using predefined role from Supabase
to: authenticatedRole,
using: sql`true`,
}),
]
);
```
Let's check an example of adding a policy to a table that exists in `Supabase`
```ts
import { sql } from "drizzle-orm";
import { pgPolicy } from "drizzle-orm/pg-core";
import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase";
export const policy = pgPolicy("authenticated role insert policy", {
for: "insert",
to: authenticatedRole,
using: sql``,
}).link(realtimeMessages);
```
# Bug fixes
- [[BUG]: postgres-js driver throws error when using new { client } constructor arguments ](https://github.com/drizzle-team/drizzle-orm/issues/3176)
================================================
FILE: changelogs/drizzle-orm/0.36.1.md
================================================
# Bug Fixes
- [[BUG]: Using sql.placeholder with limit and/or offset for a prepared statement produces TS error](https://github.com/drizzle-team/drizzle-orm/issues/2146) - thanks @L-Mario564
- [[BUG] If a query I am trying to modify with a dynamic query (....$dynamic()) contains any placeholders, I'm getting an error that says No value for placeholder.... provided](https://github.com/drizzle-team/drizzle-orm/issues/2272) - thanks @L-Mario564
- [[BUG]: Error thrown when trying to insert an array of new rows using generatedAlwaysAsIdentity() for the id column](https://github.com/drizzle-team/drizzle-orm/issues/2849) - thanks @L-Mario564
- [[BUG]: Unable to Use BigInt Types with Bun and Drizzle](https://github.com/drizzle-team/drizzle-orm/issues/2603) - thanks @L-Mario564
================================================
FILE: changelogs/drizzle-orm/0.36.2.md
================================================
# New Features
- [Support more types in like, notLike, ilike and notIlike expressions](https://github.com/drizzle-team/drizzle-orm/pull/2805)
# Bug and typo fixes
- Fixed typos in repository: thanks @armandsalle, @masto, @wackbyte, @Asher-JH, @MaxLeiter
- [Fixed .generated behavior with non-strict tsconfig](https://github.com/drizzle-team/drizzle-orm/pull/3542)
- [Fix Drizzle ORM for expo-sqlite](https://github.com/drizzle-team/drizzle-orm/pull/3197)
- [Fixed lack of schema name on columns in sql](https://github.com/drizzle-team/drizzle-orm/pull/3531)
- [fix: Adjust neon http driver entity kind](https://github.com/drizzle-team/drizzle-orm/pull/3424)
- [Export PgIntegerBuilderInitial type](https://github.com/drizzle-team/drizzle-orm/pull/2846)
- [[MySQL] Correct $returningId() implementation to correctly store selected fields](https://github.com/drizzle-team/drizzle-orm/pull/2975)
================================================
FILE: changelogs/drizzle-orm/0.36.3.md
================================================
# New Features
## Support for `UPDATE ... FROM` in PostgreSQL and SQLite
As the SQLite documentation mentions:
> [!NOTE]
> The UPDATE-FROM idea is an extension to SQL that allows an UPDATE statement to be driven by other tables in the database.
The "target" table is the specific table that is being updated. With UPDATE-FROM you can join the target table
against other tables in the database in order to help compute which rows need updating and what
the new values should be on those rows
Similarly, the PostgreSQL documentation states:
> [!NOTE]
> A table expression allowing columns from other tables to appear in the WHERE condition and update expressions
Drizzle also supports this feature starting from this version
For example, current query:
```ts
await db
.update(users)
.set({ cityId: cities.id })
.from(cities)
.where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John')))
```
Will generate this sql
```sql
update "users" set "city_id" = "cities"."id"
from "cities"
where ("cities"."name" = $1 and "users"."name" = $2)
-- params: [ 'Seattle', 'John' ]
```
You can also alias tables that are joined (in PG, you can also alias the updating table too).
```ts
const c = alias(cities, 'c');
await db
.update(users)
.set({ cityId: c.id })
.from(c);
```
Will generate this sql
```sql
update "users" set "city_id" = "c"."id"
from "cities" "c"
```
In PostgreSQL, you can also return columns from the joined tables.
```ts
const updatedUsers = await db
.update(users)
.set({ cityId: cities.id })
.from(cities)
.returning({ id: users.id, cityName: cities.name });
```
Will generate this sql
```sql
update "users" set "city_id" = "cities"."id"
from "cities"
returning "users"."id", "cities"."name"
```
## Support for `INSERT INTO ... SELECT` in all dialects
As the SQLite documentation mentions:
> [!NOTE]
> The second form of the INSERT statement contains a SELECT statement instead of a VALUES clause.
A new entry is inserted into the table for each row of data returned by executing the SELECT statement.
If a column-list is specified, the number of columns in the result of the SELECT must be the same as
the number of items in the column-list. Otherwise, if no column-list is specified, the number of
columns in the result of the SELECT must be the same as the number of columns in the table.
Any SELECT statement, including compound SELECTs and SELECT statements with ORDER BY and/or LIMIT clauses,
may be used in an INSERT statement of this form.
> [!CAUTION]
> To avoid a parsing ambiguity, the SELECT statement should always contain a WHERE clause, even if that clause is simply "WHERE true", if the upsert-clause is present. Without the WHERE clause, the parser does not know if the token "ON" is part of a join constraint on the SELECT, or the beginning of the upsert-clause.
As the PostgreSQL documentation mentions:
> [!NOTE]
> A query (SELECT statement) that supplies the rows to be inserted
And as the MySQL documentation mentions:
> [!NOTE]
> With INSERT ... SELECT, you can quickly insert many rows into a table from the result of a SELECT statement, which can select from one or many tables
Drizzle supports the current syntax for all dialects, and all of them share the same syntax. Let's review some common scenarios and API usage.
There are several ways to use select inside insert statements, allowing you to choose your preferred approach:
- You can pass a query builder inside the select function.
- You can use a query builder inside a callback.
- You can pass an SQL template tag with any custom select query you want to use
**Query Builder**
```ts
const insertedEmployees = await db
.insert(employees)
.select(
db.select({ name: users.name }).from(users).where(eq(users.role, 'employee'))
)
.returning({
id: employees.id,
name: employees.name
});
```
```ts
const qb = new QueryBuilder();
await db.insert(employees).select(
qb.select({ name: users.name }).from(users).where(eq(users.role, 'employee'))
);
```
**Callback**
```ts
await db.insert(employees).select(
() => db.select({ name: users.name }).from(users).where(eq(users.role, 'employee'))
);
```
```ts
await db.insert(employees).select(
(qb) => qb.select({ name: users.name }).from(users).where(eq(users.role, 'employee'))
);
```
**SQL template tag**
```ts
await db.insert(employees).select(
sql`select "users"."name" as "name" from "users" where "users"."role" = 'employee'`
);
```
```ts
await db.insert(employees).select(
() => sql`select "users"."name" as "name" from "users" where "users"."role" = 'employee'`
);
```
================================================
FILE: changelogs/drizzle-orm/0.36.4.md
================================================
# New Package: `drizzle-seed`
> [!NOTE]
> `drizzle-seed` can only be used with `drizzle-orm@0.36.4` or higher. Versions lower than this may work at runtime but could have type issues and identity column issues, as this patch was introduced in `drizzle-orm@0.36.4`
## Full Reference
The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview)
## Basic Usage
In this example we will create 10 users with random names and ids
```ts {12}
import { pgTable, integer, text } from "drizzle-orm/pg-core";
import { drizzle } from "drizzle-orm/node-postgres";
import { seed } from "drizzle-seed";
const users = pgTable("users", {
id: integer().primaryKey(),
name: text().notNull(),
});
async function main() {
const db = drizzle(process.env.DATABASE_URL!);
await seed(db, { users });
}
main();
```
## Options
**`count`**
By default, the `seed` function will create 10 entities.
However, if you need more for your tests, you can specify this in the seed options object
```ts
await seed(db, schema, { count: 1000 });
```
**`seed`**
If you need a seed to generate a different set of values for all subsequent runs, you can define a different number
in the `seed` option. Any new number will generate a unique set of values
```ts
await seed(db, schema, { seed: 12345 });
```
The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview)
# Features
## Added `OVERRIDING SYSTEM VALUE` api to db.insert()
If you want to force your own values for `GENERATED ALWAYS AS IDENTITY` columns, you can use `OVERRIDING SYSTEM VALUE`
As the PostgreSQL docs mention
> In an INSERT command, if ALWAYS is selected, a user-specified value is only accepted if the INSERT statement specifies OVERRIDING SYSTEM VALUE. If BY DEFAULT is selected, then the user-specified value takes precedence
```ts
await db.insert(identityColumnsTable).overridingSystemValue().values([
{ alwaysAsIdentity: 2 },
]);
```
## Added `.$withAuth()` API for Neon HTTP driver
Using this API, Drizzle will send you an auth token to authorize your query. It can be used with any query available in Drizzle by simply adding `.$withAuth()` before it. This token will be used for a specific query
Examples
```ts
const token = 'HdncFj1Nm'
await db.$withAuth(token).select().from(usersTable);
await db.$withAuth(token).update(usersTable).set({ name: 'CHANGED' }).where(eq(usersTable.name, 'TARGET'))
```
# Bug Fixes
- [[BUG]: TypeScript error Please install '@neondatabase/serverless' to allow Drizzle ORM to connect to the database](https://github.com/drizzle-team/drizzle-orm/issues/3521)
================================================
FILE: changelogs/drizzle-orm/0.37.0.md
================================================
# New Dialects
### 🎉 `SingleStore` dialect is now available in Drizzle
Thanks to the SingleStore team for creating a PR with all the necessary changes to support the MySQL-compatible part of SingleStore. You can already start using it with Drizzle. The SingleStore team will also help us iterate through updates and make more SingleStore-specific features available in Drizzle
```ts
import { int, singlestoreTable, varchar } from 'drizzle-orm/singlestore-core';
import { drizzle } from 'drizzle-orm/singlestore';
export const usersTable = singlestoreTable('users_table', {
id: int().primaryKey(),
name: varchar({ length: 255 }).notNull(),
age: int().notNull(),
email: varchar({ length: 255 }).notNull().unique(),
});
...
const db = drizzle(process.env.DATABASE_URL!);
db.select()...
```
You can check out our [Getting started guides](https://orm.drizzle.team/docs/get-started/singlestore-new) to try SingleStore!
# New Drivers
### 🎉 `SQLite Durable Objects` driver is now available in Drizzle
You can now query SQLite Durable Objects in Drizzle!
For the full example, please check our [Get Started](https://orm.drizzle.team/docs/get-started/do-new) Section
```ts
///
import { drizzle, DrizzleSqliteDODatabase } from 'drizzle-orm/durable-sqlite';
import { DurableObject } from 'cloudflare:workers'
import { migrate } from 'drizzle-orm/durable-sqlite/migrator';
import migrations from '../drizzle/migrations';
import { usersTable } from './db/schema';
export class MyDurableObject1 extends DurableObject {
storage: DurableObjectStorage;
db: DrizzleSqliteDODatabase;
constructor(ctx: DurableObjectState, env: Env) {
super(ctx, env);
this.storage = ctx.storage;
this.db = drizzle(this.storage, { logger: false });
}
async migrate() {
migrate(this.db, migrations);
}
async insert(user: typeof usersTable.$inferInsert) {
await this.db.insert(usersTable).values(user);
}
async select() {
return this.db.select().from(usersTable);
}
}
export default {
/**
* This is the standard fetch handler for a Cloudflare Worker
*
* @param request - The request submitted to the Worker from the client
* @param env - The interface to reference bindings declared in wrangler.toml
* @param ctx - The execution context of the Worker
* @returns The response to be sent back to the client
*/
	async fetch(request: Request, env: Env): Promise<Response> {
const id: DurableObjectId = env.MY_DURABLE_OBJECT1.idFromName('durable-object');
const stub = env.MY_DURABLE_OBJECT1.get(id);
await stub.migrate();
await stub.insert({
name: 'John',
age: 30,
email: 'john@example.com',
})
console.log('New user created!')
const users = await stub.select();
console.log('Getting all users from the database: ', users)
return new Response();
}
}
```
# Bug fixes
- [[BUG]: $with is undefined on withReplicas](https://github.com/drizzle-team/drizzle-orm/issues/1834)
- [[BUG]: Neon serverless driver accepts authToken as a promise, but the $withAuth does not](https://github.com/drizzle-team/drizzle-orm/issues/3597)
================================================
FILE: changelogs/drizzle-orm/0.38.0.md
================================================
# Types breaking changes
A few internal types were changed and extra generic types for length of column types were added in this release. It won't affect anyone, unless you are using those internal types for some custom wrappers, logic, etc. Here is a list of all types that were changed, so if you are relying on those, please review them before upgrading
- `MySqlCharBuilderInitial`
- `MySqlVarCharBuilderInitial`
- `PgCharBuilderInitial`
- `PgArrayBuilder`
- `PgArray`
- `PgVarcharBuilderInitial`
- `PgBinaryVectorBuilderInitial`
- `PgBinaryVectorBuilder`
- `PgBinaryVector`
- `PgHalfVectorBuilderInitial`
- `PgHalfVectorBuilder`
- `PgHalfVector`
- `PgVectorBuilderInitial`
- `PgVectorBuilder`
- `PgVector`
- `SQLiteTextBuilderInitial`
# New Features
- Added new function `getViewSelectedFields`
- Added `$inferSelect` function to views
- Added `InferSelectViewModel` type for views
- Added `isView` function
# Validator packages updates
- `drizzle-zod` has been completely rewritten. You can find detailed information about it [here](https://github.com/drizzle-team/drizzle-orm/blob/main/changelogs/drizzle-zod/0.6.0.md)
- `drizzle-valibot` has been completely rewritten. You can find detailed information about it [here](https://github.com/drizzle-team/drizzle-orm/blob/main/changelogs/drizzle-valibot/0.3.0.md)
- `drizzle-typebox` has been completely rewritten. You can find detailed information about it [here](https://github.com/drizzle-team/drizzle-orm/blob/main/changelogs/drizzle-typebox/0.2.0.md)
Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release:
- Output for all packages are now unminified, makes exploring the compiled code easier when published to npm.
- Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed.
- Slight performance gain. To determine the column data type we used the `is` function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up very quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property.
- Some changes had to be made at the type level in the ORM package for better compatibility with drizzle-valibot.
And a set of new features
- `createSelectSchema` function now also accepts views and enums.
- New function: `createUpdateSchema`, for use in updating queries.
- New function: `createSchemaFactory`, to provide more advanced options and to avoid bloating the parameters of the other schema functions
# Bug fixes
- [[FEATURE]: publish packages un-minified](https://github.com/drizzle-team/drizzle-orm/issues/2247)
- [Don't allow unknown keys in drizzle-zod refinement](https://github.com/drizzle-team/drizzle-orm/issues/573)
- [[BUG]:drizzle-zod not working with pgSchema](https://github.com/drizzle-team/drizzle-orm/issues/1458)
- [Add createUpdateSchema to drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/503)
- [[BUG]:drizzle-zod produces wrong type](https://github.com/drizzle-team/drizzle-orm/issues/1110)
- [[BUG]:Drizzle-zod:Boolean and Serial types from Schema are defined as enum when using CreateInsertSchema and CreateSelectSchema](https://github.com/drizzle-team/drizzle-orm/issues/1327)
- [[BUG]: Drizzle typebox enum array wrong schema and type](https://github.com/drizzle-team/drizzle-orm/issues/1345)
- [[BUG]:drizzle-zod not working with pgSchema](https://github.com/drizzle-team/drizzle-orm/issues/1458)
- [[BUG]: drizzle-zod not parsing arrays correctly](https://github.com/drizzle-team/drizzle-orm/issues/1609)
- [[BUG]: Drizzle typebox not supporting array](https://github.com/drizzle-team/drizzle-orm/issues/1810)
- [[FEATURE]: Export factory functions from drizzle-zod to allow usage with extended Zod classes](https://github.com/drizzle-team/drizzle-orm/issues/2245)
- [[FEATURE]: Add support for new pipe syntax for drizzle-valibot](https://github.com/drizzle-team/drizzle-orm/issues/2358)
- [[BUG]: drizzle-zod's createInsertSchema() can't handle column of type vector](https://github.com/drizzle-team/drizzle-orm/issues/2424)
- [[BUG]: drizzle-typebox fails to map geometry column to type-box schema](https://github.com/drizzle-team/drizzle-orm/issues/2516)
- [[BUG]: drizzle-valibot does not provide types for returned schemas](https://github.com/drizzle-team/drizzle-orm/issues/2521)
- [[BUG]: Drizzle-typebox types SQLite real field to string](https://github.com/drizzle-team/drizzle-orm/issues/2524)
- [[BUG]: drizzle-zod: documented usage generates type error with exactOptionalPropertyTypes](https://github.com/drizzle-team/drizzle-orm/issues/2550)
- [[BUG]: drizzle-zod does not respect/count db type range](https://github.com/drizzle-team/drizzle-orm/issues/2737)
- [[BUG]: drizzle-zod not overriding optional](https://github.com/drizzle-team/drizzle-orm/issues/2755)
- [[BUG]:drizzle-zod doesn't accept custom id value](https://github.com/drizzle-team/drizzle-orm/issues/2957)
- [[FEATURE]: Support for Database Views in Drizzle Zod](https://github.com/drizzle-team/drizzle-orm/issues/3398)
- [[BUG]: drizzle-valibot return type any](https://github.com/drizzle-team/drizzle-orm/issues/3621)
- [[BUG]: drizzle-zod Type generation results in undefined types](https://github.com/drizzle-team/drizzle-orm/issues/3645)
- [[BUG]: GeneratedAlwaysAs](https://github.com/drizzle-team/drizzle-orm/issues/3511)
- [[FEATURE]: $inferSelect on a view](https://github.com/drizzle-team/drizzle-orm/issues/2610)
- [[BUG]:Can't infer props from view in schema](https://github.com/drizzle-team/drizzle-orm/issues/3392)
================================================
FILE: changelogs/drizzle-orm/0.38.1.md
================================================
- Closed [[FEATURE]: Add more flexible typing for usage with exactOptionalPropertyTypes](https://github.com/drizzle-team/drizzle-orm/issues/2742)
================================================
FILE: changelogs/drizzle-orm/0.38.2.md
================================================
# New features
## `USE INDEX`, `FORCE INDEX` and `IGNORE INDEX` for MySQL
In MySQL, the statements USE INDEX, FORCE INDEX, and IGNORE INDEX are hints used in SQL queries to influence how the query optimizer selects indexes. These hints provide fine-grained control over index usage, helping optimize performance when the default behavior of the optimizer is not ideal.
### Use Index
The `USE INDEX` hint suggests to the optimizer which indexes to consider when processing the query. The optimizer is not forced to use these indexes but will prioritize them if they are suitable.
```ts
export const users = mysqlTable('users', {
id: int('id').primaryKey(),
name: varchar('name', { length: 100 }).notNull(),
}, () => [usersTableNameIndex]);
const usersTableNameIndex = index('users_name_index').on(users.name);
await db.select()
.from(users, { useIndex: usersTableNameIndex })
.where(eq(users.name, 'David'));
```
### Ignore Index
The `IGNORE INDEX` hint tells the optimizer to avoid using specific indexes for the query. MySQL will consider all other indexes (if any) or perform a full table scan if necessary.
```ts
export const users = mysqlTable('users', {
id: int('id').primaryKey(),
name: varchar('name', { length: 100 }).notNull(),
}, () => [usersTableNameIndex]);
const usersTableNameIndex = index('users_name_index').on(users.name);
await db.select()
.from(users, { ignoreIndex: usersTableNameIndex })
.where(eq(users.name, 'David'));
```
### Force Index
The `FORCE INDEX` hint forces the optimizer to use the specified index(es) for the query. If the specified index cannot be used, MySQL will not fall back to other indexes; it might resort to a full table scan instead.
```ts copy
export const users = mysqlTable('users', {
id: int('id').primaryKey(),
name: varchar('name', { length: 100 }).notNull(),
}, () => [usersTableNameIndex]);
const usersTableNameIndex = index('users_name_index').on(users.name);
await db.select()
.from(users, { forceIndex: usersTableNameIndex })
.where(eq(users.name, 'David'));
```
You can also combine those hints and use multiple indexes in a query if you need
================================================
FILE: changelogs/drizzle-orm/0.38.3.md
================================================
- Fix incorrect deprecation detection for table declarations
================================================
FILE: changelogs/drizzle-orm/0.38.4.md
================================================
- New SingleStore type `vector` - thanks @mitchwadair
- Fix wrong DROP INDEX statement generation, [#3866](https://github.com/drizzle-team/drizzle-orm/pull/3866) - thanks @WaciX
- Typo fixes - thanks @stephan281094
================================================
FILE: changelogs/drizzle-orm/0.39.0.md
================================================
# New features
## Bun SQL driver support
You can now use the new Bun SQL driver released in Bun v1.2.0 with Drizzle
```ts
import { drizzle } from 'drizzle-orm/bun-sql';
const db = drizzle(process.env.PG_DB_URL!);
const result = await db.select().from(...);
```
or you can use Bun SQL instance
```ts
import { drizzle } from 'drizzle-orm/bun-sql';
import { SQL } from 'bun';
const client = new SQL(process.env.PG_DB_URL!);
const db = drizzle({ client });
const result = await db.select().from(...);
```
Current Limitations:
- `json` and `jsonb` inserts and selects currently perform an additional `JSON.stringify` on the Bun SQL side. Once this is removed, they should work properly. You can always use custom types and redefine the mappers to and from the database.
- `datetime`, `date`, and `timestamp` will not work properly when using `mode: string` in Drizzle. This is due to Bun's API limitations, which prevent custom parsers for queries. As a result, Drizzle cannot control the response sent from Bun SQL to Drizzle. Once this feature is added to Bun SQL, it should work as expected.
- `array` types currently have issues in Bun SQL.
> You can check more in [Bun docs](https://bun.sh/docs/api/sql)
>
> You can check more getting started examples in [Drizzle docs](https://orm.drizzle.team/docs/get-started/bun-sql-new)
## WITH now supports INSERT, UPDATE, DELETE and raw sql template
**`with` and `insert`**
```ts
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
});
const sq = db.$with('sq').as(
db.insert(users).values({ name: 'John' }).returning(),
);
const result = await db.with(sq).select().from(sq);
```
**`with` and `update`**
```ts
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
});
const sq = db.$with('sq').as(
db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(),
);
const result = await db.with(sq).select().from(sq);
```
**`with` and `delete`**
```ts
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
});
const sq = db.$with('sq').as(
db.delete(users).where(eq(users.name, 'John')).returning(),
);
const result = await db.with(sq).select().from(sq);
```
**`with` and `sql`**
```ts
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
});
const sq = db.$with('sq', {
userId: users.id,
data: {
name: users.name,
},
}).as(sql`select * from ${users} where ${users.name} = 'John'`);
const result = await db.with(sq).select().from(sq);
```
## New tables in `/neon` import
In this release you can use `neon_identity` schema and `users_sync` table inside this schema by just importing it from `/neon`
```ts
// "drizzle-orm/neon"
const neonIdentitySchema = pgSchema('neon_identity');
/**
* Table schema of the `users_sync` table used by Neon Identity.
* This table automatically synchronizes and stores user data from external authentication providers.
*
* @schema neon_identity
* @table users_sync
*/
export const usersSync = neonIdentitySchema.table('users_sync', {
rawJson: jsonb('raw_json').notNull(),
id: text().primaryKey().notNull(),
name: text(),
email: text(),
createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }),
deletedAt: timestamp('deleted_at', { withTimezone: true, mode: 'string' }),
});
```
# Utils and small improvements
## `getViewName` util function
```ts
import { getViewName } from 'drizzle-orm/sql'
export const user = pgTable("user", {
id: serial(),
name: text(),
email: text(),
});
export const userView = pgView("user_view").as((qb) => qb.select().from(user));
const viewName = getViewName(userView)
```
# Bug fixed and GitHub issue closed
- [[FEATURE]: allow INSERT in CTEs (WITH clauses)](https://github.com/drizzle-team/drizzle-orm/issues/2078)
- [[FEATURE]: Support Raw SQL in CTE Query Builder](https://github.com/drizzle-team/drizzle-orm/issues/2168)
- [[FEATURE]: include pre-defined database objects related to Neon Identity in drizzle-orm](https://github.com/drizzle-team/drizzle-orm/issues/3959)
- [[BUG]: $count is undefined on withReplicas](https://github.com/drizzle-team/drizzle-orm/issues/3951)
- [[FEATURE]: get[Materialized]ViewName, ie getTableName but for (materialized) views.](https://github.com/drizzle-team/drizzle-orm/issues/3946)
- [[BUG]: $count API error with vercel-postgres](https://github.com/drizzle-team/drizzle-orm/issues/3710)
- [[BUG]: Cannot use schema.coerce on refining drizzle-zod types](https://github.com/drizzle-team/drizzle-orm/issues/3842)
- [[FEATURE]: Type Coercion in drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/776)
- [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732)
- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869)
================================================
FILE: changelogs/drizzle-orm/0.39.1.md
================================================
- Fixed SQLite onConflict clauses being overwritten instead of stacked - [#2276](https://github.com/drizzle-team/drizzle-orm/issues/2276)
- Added view support to `aliasedTable()`
- Fixed sql builder prefixing aliased views and tables with their schema
================================================
FILE: changelogs/drizzle-orm/0.39.2.md
================================================
- To be compatible with latest Neon Auth feature we renamed the pre-defined schema internally, from `neon_identity` to `neon_auth` - thanks @pffigueiredo
================================================
FILE: changelogs/drizzle-orm/0.39.3.md
================================================
- Remove `react` from peerDependencies
================================================
FILE: changelogs/drizzle-orm/0.40.0.md
================================================
# New Features
## Added `Gel` dialect support and `gel-js` client support
Drizzle is getting a new `Gel` dialect with its own types and Gel-specific logic. In this first iteration, almost all query-building features have been copied from the `PostgreSQL` dialect since Gel is fully PostgreSQL-compatible. The only change in this iteration is the data types. The Gel dialect has a different set of available data types, and all mappings for these types have been designed to avoid any extra conversions on Drizzle's side. This means you will insert and select exactly the same data as supported by the Gel protocol.
Drizzle + Gel integration will work only through `drizzle-kit pull`. Drizzle won't support `generate`, `migrate`, or `push` features in this case. Instead, drizzle-kit is used solely to pull the Drizzle schema from the Gel database, which can then be used in your `drizzle-orm` queries.
The Gel + Drizzle workflow:
1. Use the `gel` CLI to manage your schema.
2. Use the `gel` CLI to generate and apply migrations to the database.
3. Use drizzle-kit to pull the Gel database schema into a Drizzle schema.
4. Use drizzle-orm with gel-js to query the Gel database.
Here is a small example of how to connect to Gel using Drizzle:
```typescript copy
// Make sure to install the 'gel' package
import { drizzle } from "drizzle-orm/gel";
import { createClient } from "gel";
const gelClient = createClient();
const db = drizzle({ client: gelClient });
const result = await db.execute('select 1');
```
On the drizzle-kit side you can now use `dialect: "gel"`
```ts
// drizzle.config.ts
import { defineConfig } from 'drizzle-kit';
export default defineConfig({
dialect: 'gel',
});
```
For a complete Get Started tutorial you can use our new guides:
- [Get Started with Drizzle and Gel in a new project](https://orm.drizzle.team/docs/get-started/gel-new)
- [Get Started with Drizzle and Gel in an existing project](https://orm.drizzle.team/docs/get-started/gel-existing)
================================================
FILE: changelogs/drizzle-orm/0.40.1.md
================================================
#### Updates to `neon-http` for `@neondatabase/serverless@1.0.0` - thanks @jawj
Starting from this version, drizzle-orm will be compatible with both `@neondatabase/serverless` <1.0 and >1.0
================================================
FILE: changelogs/drizzle-orm/0.41.0.md
================================================
- `bigint`, `number` modes for `SQLite`, `MySQL`, `PostgreSQL`, `SingleStore` `decimal` & `numeric` column types
- Changed behavior of `sql-js` query preparation to query prebuild instead of db-side prepare due to need to manually free prepared queries, removed `.free()` method
- Fixed `MySQL`, `SingleStore` `varchar` allowing not specifying `length` in config
- Fixed `MySQL`, `SingleStore` `binary`, `varbinary` data\\type mismatches
- Fixed `numeric`\\`decimal` data\\type mismatches: [#1290](https://github.com/drizzle-team/drizzle-orm/issues/1290), [#1453](https://github.com/drizzle-team/drizzle-orm/issues/1453)
- Fixed `drizzle-studio` + `AWS Data Api` connection issue: [#3224](https://github.com/drizzle-team/drizzle-orm/issues/3224)
- Fixed `isConfig` utility function checking types of wrong fields
- Enabled `supportBigNumbers` in auto-created `mysql2` driver instances
- Fixed custom schema tables querying in RQBv1: [#4060](https://github.com/drizzle-team/drizzle-orm/issues/4060)
- Removed in-driver mapping for postgres types `1231` (`numeric[]`), `1115` (`timestamp[]`), `1185` (`timestamp_with_timezone[]`), `1187` (`interval[]`), `1182` (`date[]`), preventing precision loss and data\\type mismatches
- Fixed `SQLite` `buffer`-mode `blob` sometimes returning `number[]`
================================================
FILE: changelogs/drizzle-orm/0.42.0.md
================================================
## Features
### Duplicate imports removal
When importing from `drizzle-orm` using custom loaders, you may encounter issues such as: `SyntaxError: The requested module 'drizzle-orm' does not provide an export named 'eq'`
This issue arose because there were duplicated exports in `drizzle-orm`. To address this, we added a set of tests that checks every file in `drizzle-orm` to ensure all exports are valid. These tests will fail if any new duplicated exports appear.
In this release, we’ve removed all duplicated exports, so you should no longer encounter this issue.
### `pgEnum` and `mysqlEnum` now can accept both strings and TS enums
If you provide a TypeScript enum, all your types will be inferred as that enum - so you can insert and retrieve enum values directly. If you provide a string union, it will work as before.
```ts
enum Test {
a = 'a',
b = 'b',
c = 'c',
}
const tableWithTsEnums = mysqlTable('enums_test_case', {
id: serial().primaryKey(),
enum1: mysqlEnum(Test).notNull(),
enum2: mysqlEnum(Test).default(Test.a),
});
await db.insert(tableWithTsEnums).values([
{ id: 1, enum1: Test.a, enum2: Test.b, enum3: Test.c },
{ id: 2, enum1: Test.a, enum3: Test.c },
{ id: 3, enum1: Test.a },
]);
const res = await db.select().from(tableWithTsEnums);
expect(res).toEqual([
{ id: 1, enum1: 'a', enum2: 'b', enum3: 'c' },
{ id: 2, enum1: 'a', enum2: 'a', enum3: 'c' },
{ id: 3, enum1: 'a', enum2: 'a', enum3: 'b' },
]);
```
## Improvements
- Make `inArray` accept `ReadonlyArray` as a value - thanks @Zamiell
- Pass row type parameter to `@planetscale/database`'s execute - thanks @ayrton
- New `InferEnum` type - thanks @totigm
## Issues closed
- [Add first-class support for TS native enums](https://github.com/drizzle-team/drizzle-orm/issues/332)
- [[FEATURE]: support const enums](https://github.com/drizzle-team/drizzle-orm/issues/2798)
- [[BUG]: SyntaxError: The requested module 'drizzle-orm' does not provide an export named 'lte'](https://github.com/drizzle-team/drizzle-orm/issues/4079)
================================================
FILE: changelogs/drizzle-orm/0.43.0.md
================================================
## Features
- Added `cross join` \([#1414](https://github.com/drizzle-team/drizzle-orm/issues/1414)\)
- Added lateral `left`, `inner`, `cross` joins to `PostgreSQL`, `MySQL`, `Gel`, `SingleStore`
- Added drizzle connection attributes to `SingleStore`'s driver instances
## Fixes
- Removed unsupported by dialect `full join` from `MySQL` select api
- Forced `Gel` columns to always have explicit schema & table prefixes due to potential errors caused by lack of such prefix in subquery's selection when there's already a column bearing the same name in context
- Added missing export for `PgTextBuilderInitial` type
- Removed outdated `IfNotImported` type check from `SingleStore` driver initializer
- Fixed incorrect type inferrence for insert and update models with non-strict `tsconfig`s \([#2654](https://github.com/drizzle-team/drizzle-orm/issues/2654)\)
- Fixed invalid spelling of `nowait` flag \([#3554](https://github.com/drizzle-team/drizzle-orm/issues/3554)\)
- [Add join lateral support](https://github.com/drizzle-team/drizzle-orm/issues/420)
- [Remove .fullJoin() from MySQL API](https://github.com/drizzle-team/drizzle-orm/issues/1125)
================================================
FILE: changelogs/drizzle-orm/0.43.1.md
================================================
## Fixes
- [Fixed incorrect types of schema enums in PostgreSQL](https://github.com/drizzle-team/drizzle-orm/issues/4421)
================================================
FILE: changelogs/drizzle-orm/0.44.0.md
================================================
## Error handling
Starting from this version, we’ve introduced a new `DrizzleQueryError` that wraps all errors from database drivers and provides a set of useful information:
1. A proper stack trace to identify which exact `Drizzle` query failed
2. The generated SQL string and its parameters
3. The original stack trace from the driver that caused the DrizzleQueryError
## Drizzle `cache` module
Drizzle sends every query straight to your database by default. There are no hidden actions, no automatic caching or invalidation - you’ll always see exactly what runs. If you want caching, you must opt in.
By default, Drizzle uses an explicit caching strategy (i.e. `global: false`), so nothing is ever cached unless you ask. This prevents surprises or hidden performance traps in your application. Alternatively, you can flip on all caching (global: true) so that every select will look in cache first.
Our first native integration was built together with the Upstash team and lets you natively use `upstash` as a cache for your drizzle queries
```ts
import { upstashCache } from "drizzle-orm/cache/upstash";
import { drizzle } from "drizzle-orm/...";
const db = drizzle(process.env.DB_URL!, {
cache: upstashCache({
// 👇 Redis credentials (optional — can also be pulled from env vars)
url: '',
token: '',
// 👇 Enable caching for all queries by default (optional)
global: true,
// 👇 Default cache behavior (optional)
config: { ex: 60 }
})
});
```
You can also implement your own cache, as Drizzle exposes all the necessary APIs, such as get, put, mutate, etc.
You can find full implementation details on the [website](https://orm.drizzle.team/docs/cache#custom-cache)
```ts
import Keyv from "keyv";
export class TestGlobalCache extends Cache {
private globalTtl: number = 1000;
// This object will be used to store which query keys were used
// for a specific table, so we can later use it for invalidation.
private usedTablesPerKey: Record<string, string[]> = {};
constructor(private kv: Keyv = new Keyv()) {
super();
}
// For the strategy, we have two options:
// - 'explicit': The cache is used only when .$withCache() is added to a query.
// - 'all': All queries are cached globally.
// The default behavior is 'explicit'.
override strategy(): "explicit" | "all" {
return "all";
}
// This function accepts the query and its parameters, hashed into the key param,
// allowing you to retrieve response values for this query from the cache.
override async get(key: string): Promise<any[] | undefined> {
...
}
// This function accepts several options to define how cached data will be stored:
// - 'key': A hashed query and parameters.
// - 'response': An array of values returned by Drizzle from the database.
// - 'tables': An array of tables involved in the select queries. This information is needed for cache invalidation.
//
// For example, if a query uses the "users" and "posts" tables, you can store this information. Later, when the app executes
// any mutation statements on these tables, you can remove the corresponding key from the cache.
// If you're okay with eventual consistency for your queries, you can skip this option.
override async put(
key: string,
response: any,
tables: string[],
config?: CacheConfig,
): Promise<void> {
...
}
// This function is called when insert, update, or delete statements are executed.
// You can either skip this step or invalidate queries that used the affected tables.
//
// The function receives an object with two keys:
// - 'tags': Used for queries labeled with a specific tag, allowing you to invalidate by that tag.
// - 'tables': The actual tables affected by the insert, update, or delete statements,
// helping you track which tables have changed since the last cache update.
override async onMutate(params: {
tags: string | string[];
tables: string | string[] | Table | Table[];
}): Promise<void> {
...
}
}
```
For more usage examples you can check our [docs](https://orm.drizzle.team/docs/cache#cache-usage-examples)
================================================
FILE: changelogs/drizzle-orm/0.44.1.md
================================================
- [[BUG]: Drizzle can no longer run on Durable Objects](https://github.com/drizzle-team/drizzle-orm/issues/4586)
================================================
FILE: changelogs/drizzle-orm/0.44.2.md
================================================
- [BUG]: Fixed type issues with joins with certain variations of `tsconfig`: [#4535](https://github.com/drizzle-team/drizzle-orm/issues/4535), [#4457](https://github.com/drizzle-team/drizzle-orm/issues/4457)
================================================
FILE: changelogs/drizzle-orm/0.44.3.md
================================================
- Fixed types of `$client` for clients created by drizzle function
```ts
await db.$client.[...]
```
- Added the `updated_at` column to the `neon_auth.users_sync` table definition.
================================================
FILE: changelogs/drizzle-orm/0.44.4.md
================================================
- Fix wrong DrizzleQueryError export. thanks @nathankleyn
================================================
FILE: changelogs/drizzle-orm/0.44.5.md
================================================
- Fixed invalid usage of `.one()` in `durable-sqlite` session
- Fixed spread operator related crash in sqlite `blob` columns
- Better browser support for sqlite `blob` columns
- Improved sqlite `blob` mapping
================================================
FILE: changelogs/drizzle-orm/0.44.6.md
================================================
- feat: add $replicas reference #4874
================================================
FILE: changelogs/drizzle-orm/0.44.7.md
================================================
- fix durable sqlite transaction return value #3746 - thanks @joaocstro
================================================
FILE: changelogs/drizzle-orm/0.45.0.md
================================================
- Fixed pg-native Pool detection in node-postgres transactions
- Allowed subqueries in select fields
- Updated typo algorythm => algorithm
- Fixed `$onUpdate` not handling `SQL` values (fixes [#2388](https://github.com/drizzle-team/drizzle-orm/issues/2388), tests implemented by [L-Mario564](https://github.com/L-Mario564) in [#2911](https://github.com/drizzle-team/drizzle-orm/pull/2911))
- Fixed `pg` mappers not handling `Date` instances in `bun-sql:postgresql` driver responses for `date`, `timestamp` types (fixes [#4493](https://github.com/drizzle-team/drizzle-orm/issues/4493))
================================================
FILE: changelogs/drizzle-orm/0.45.1.md
================================================
- Fixed pg-native Pool detection in node-postgres transactions breaking in environments with forbidden `require()` ([#5107](https://github.com/drizzle-team/drizzle-orm/issues/5107))
================================================
FILE: changelogs/drizzle-orm-mysql/0.14.1.md
================================================
# drizzle-orm-mysql 0.14.1
- Release support for mysql. Currently mysql module is up-to-date with `pg` and `sqlite`
================================================
FILE: changelogs/drizzle-orm-mysql/0.14.2.md
================================================
# drizzle-orm-mysql 0.14.2
- Bumped everything to 0.14.2
================================================
FILE: changelogs/drizzle-orm-mysql/0.14.3.md
================================================
# drizzle-orm-mysql 0.14.3
- Fill author field in package.json
================================================
FILE: changelogs/drizzle-orm-mysql/0.15.0.md
================================================
# drizzle-orm-mysql 0.15.0
- Bumped everything to 0.15.0
================================================
FILE: changelogs/drizzle-orm-mysql/0.15.1.md
================================================
# drizzle-orm-mysql 0.15.1
Add support for schemas -> [MySQL schemas](https://dev.mysql.com/doc/refman/8.0/en/create-database.html)
> **Warning**
> If you have tables with the same names in different schemas then drizzle will respond with `never[]` error in result types and error from database
>
> In this case you may use [alias syntax](https://github.com/drizzle-team/drizzle-orm/tree/main/drizzle-orm-mysql#join-aliases-and-self-joins)
---
Usage example
```typescript
// Table in default schema
const publicUsersTable = mysqlTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
verified: boolean('verified').notNull().default(false),
jsonb: json('jsonb'),
createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(),
});
// Table in custom schema
const mySchema = mysqlSchema('mySchema');
const mySchemaUsersTable = mySchema('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
verified: boolean('verified').notNull().default(false),
jsonb: json('jsonb'),
createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(),
});
```
---
## Breaking changes
- `foreignKey()` function API changes. Previously you needed to pass a callback function with table columns for the FK. Now there is no need for a callback — just an object with data for the FK
#### Before
```typescript
export const usersTable = mysqlTable('userstest', {
id: serial('id').primaryKey(),
homeCity: text('name').notNull(),
createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(),
}, (users) => ({
// foreignKey has a callback as param
usersCityFK: foreignKey(() => ({ columns: [users.homeCity], foreignColumns: [cities.id] })),
}));
```
#### Now
```typescript
export const usersTable = mysqlTable('userstest', {
id: serial('id').primaryKey(),
homeCity: text('name').notNull(),
createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(),
}, (users) => ({
// foreignKey doesn't have a callback as param
usersCityFK: foreignKey({ columns: [users.homeCity], foreignColumns: [cities.id] }),
}));
```
---
- Change enum initializing strategy for mysql
You should use
``` typescript
mysqlEnum('popularity', ['unknown', 'known', 'popular']).notNull().default('known')
```
instead of
``` typescript
export const popularityEnum = mysqlEnum('popularity', ['unknown', 'known', 'popular']);
popularityEnum('column_name');
```
Usage example in table schema
``` typescript
const tableWithEnums = mysqlTable('enums_test_case', {
id: serial('id').primaryKey(),
enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(),
enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'),
enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'),
});
```
================================================
FILE: changelogs/drizzle-orm-mysql/0.15.2.md
================================================
# drizzle-orm-mysql 0.15.2
Internal release
================================================
FILE: changelogs/drizzle-orm-mysql/0.15.3.md
================================================
# drizzle-orm-mysql 0.15.3
Internal release
================================================
FILE: changelogs/drizzle-orm-mysql/0.16.0.md
================================================
# drizzle-orm-mysql 0.16.0
- Bump all packages to 0.16.0
================================================
FILE: changelogs/drizzle-orm-mysql/0.16.1.md
================================================
# drizzle-orm-mysql 0.16.1
- Add possibility to define database custom data types
Example usage:
```typescript
const customText = customType<{ data: string }>({
dataType() {
return 'text';
},
});
const usersTable = mysqlTable('users', {
name: customText('name').notNull(),
});
```
For more examples please check [docs](https://github.com/drizzle-team/drizzle-orm/blob/main/docs/custom-types.lite.md)
================================================
FILE: changelogs/drizzle-orm-mysql/0.16.2.md
================================================
# drizzle-orm-mysql 0.16.2
- Fix peer dependency error for >=0.16 drizzle packages
================================================
FILE: changelogs/drizzle-orm-pg/0.12.0-beta.40.md
================================================
# drizzle-orm-pg 0.12.0-beta.40
- Added prepared statements and placeholders support.
- Refactored `.select().fields()` to allow fields from joined tables and nested objects structure, removed partial selects from joins.
- Allowed passing query builders to `db.execute`.
- Optimized INSERT query generation for single values by skipping columns without values.
- Exposed `table` property from index config.
- Removed testing utils.
================================================
FILE: changelogs/drizzle-orm-pg/0.13.0.md
================================================
# drizzle-orm-pg 0.13.0
- Release 🎉
================================================
FILE: changelogs/drizzle-orm-pg/0.13.1.md
================================================
# drizzle-orm-pg 0.13.1
- Implemented node-pg prepared statements usage via adding `name` argument to `.prepare()` method.
================================================
FILE: changelogs/drizzle-orm-pg/0.13.2.md
================================================
# drizzle-orm-pg 0.13.2
- Fix prepared statements usage.
================================================
FILE: changelogs/drizzle-orm-pg/0.13.3.md
================================================
# drizzle-orm-pg 0.13.3
- Implemented NeonDB serverless driver support.
- (internal) Added `session.all()` and `session.values()` methods.
================================================
FILE: changelogs/drizzle-orm-pg/0.13.4.md
================================================
# drizzle-orm-pg 0.13.4
- Fixed types for IndexBuilder.
================================================
FILE: changelogs/drizzle-orm-pg/0.14.0.md
================================================
# drizzle-orm-pg 0.14.0
- Separated migrations functionality to a separate import:
```typescript
import { migrate } from 'drizzle-orm-pg/node/migrate';
```
- Replaced `await new PgConnector(client).connect()` with `drizzle(client)`.
- `import { PgConnector } from 'drizzle-orm-pg` -> `import { drizzle } from 'drizzle-orm-pg/node`.
================================================
FILE: changelogs/drizzle-orm-pg/0.14.1.md
================================================
# drizzle-orm-pg 0.14.1
- Bumped everything to 0.14.1.
================================================
FILE: changelogs/drizzle-orm-pg/0.14.2.md
================================================
# drizzle-orm-pg 0.14.2
- Bumped everything to 0.14.2
================================================
FILE: changelogs/drizzle-orm-pg/0.14.3.md
================================================
# drizzle-orm-pg 0.14.3
- Fixed `.onConflict` statement query builder. In previous versions target column was mapped together with table name
- Added documentation examples for `onConflict`
- Added documentation examples for returning statements for insert/update/delete
- Add more tests for `onConflict` query builder
================================================
FILE: changelogs/drizzle-orm-pg/0.14.4.md
================================================
# drizzle-orm-pg 0.14.4
- Fill author field in package.json
================================================
FILE: changelogs/drizzle-orm-pg/0.15.0.md
================================================
# drizzle-orm-pg 0.15.0
- Set `notNull` to `true` in runtime, when `.primaryKey()` function was used in `ColumnBuilder`
- Set `no action` for `OnDelete` and `OnUpdate` in runtime by default
- Add internal version for ORM api
- Index name now becomes optional. You can write either `index('usersNameIdx')` or `index()`. In last case, drizzle will generate index name automatically based on table and column index was created on
## Breaking changes
`foreignKey()` function API changes. Previously you needed to pass a callback function with table columns for the FK. Now there is no need for a callback — just an object with data for the FK
#### Before
```typescript
export const usersTable = pgTable(
'users_table',
{
id: serial('id').primaryKey(),
uuid: uuid('uuid').defaultRandom().notNull(),
homeCity: integer('home_city').notNull()
},
(users) => ({
// foreignKey had a callback as param
usersCityFK: foreignKey(() => ({ columns: [users.homeCity], foreignColumns: [cities.id] })),
}),
);
```
#### Now
```typescript
export const usersTable = pgTable(
'users_table',
{
id: serial('id').primaryKey(),
uuid: uuid('uuid').defaultRandom().notNull(),
homeCity: integer('home_city').notNull()
},
(users) => ({
// foreignKey doesn't have a callback as param
usersCityFK: foreignKey({ columns: [users.homeCity], foreignColumns: [cities.id] }),
}),
);
```
================================================
FILE: changelogs/drizzle-orm-pg/0.15.1.md
================================================
# drizzle-orm-pg 0.15.1
Add support for schemas -> [PostgreSQL schemas](https://www.postgresql.org/docs/current/ddl-schemas.html)
---
Drizzle won't append any schema before the table definition by default. So if your tables are in the `public` schema, drizzle will generate -> `select * from "users"`
But if you will specify any custom schema you want, then drizzle will generate -> `select * from "custom_schema"."users"`
> **Warning**
> If you have tables with the same names in different schemas, drizzle will respond with a `never[]` error in result types and an error from the database
>
> In this case you may use [alias syntax](https://github.com/drizzle-team/drizzle-orm/tree/main/drizzle-orm-pg#join-aliases-and-self-joins)
---
Usage example
```typescript
// Table in default schema
const publicUsersTable = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
verified: boolean('verified').notNull().default(false),
jsonb: jsonb('jsonb'),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});
// Table in custom schema
const mySchema = pgSchema('mySchema');
const usersTable = mySchema('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
verified: boolean('verified').notNull().default(false),
jsonb: jsonb('jsonb'),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});
```
================================================
FILE: changelogs/drizzle-orm-pg/0.15.2.md
================================================
# drizzle-orm-pg 0.15.2
Internal release
================================================
FILE: changelogs/drizzle-orm-pg/0.15.3.md
================================================
# drizzle-orm-pg 0.15.3
Internal release
================================================
FILE: changelogs/drizzle-orm-pg/0.16.0.md
================================================
# drizzle-orm-pg 0.16.0
- Implemented [postgres.js](https://github.com/porsager/postgres) driver support ([docs](/drizzle-orm-pg/src/postgres-js/README.md))
================================================
FILE: changelogs/drizzle-orm-pg/0.16.1.md
================================================
# drizzle-orm-pg 0.16.1
- Fix documentation links
================================================
FILE: changelogs/drizzle-orm-pg/0.16.2.md
================================================
- Add possibility to define database custom data types
Example usage:
```typescript
const customText = customType<{ data: string }>({
dataType() {
return 'text';
},
});
const usersTable = pgTable('users', {
name: customText('name').notNull(),
});
```
For more examples please check [docs](https://github.com/drizzle-team/drizzle-orm/blob/main/docs/custom-types.lite.md)
================================================
FILE: changelogs/drizzle-orm-pg/0.16.3.md
================================================
# drizzle-orm-pg 0.16.3
- Fix peer dependency error for >=0.16 drizzle packages
================================================
FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.17.md
================================================
# drizzle-orm-sqlite 0.12.0-beta.17
- Refactored `.select().fields()` to allow fields from joined tables and nested objects structure, removed partial selects from joins.
- Replaced `.execute()` in query builders and prepared statements with `.run()`, `.all()`, `.get()`, `.values()`.
================================================
FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.18.md
================================================
# drizzle-orm-sqlite 0.12.0-beta.18
- Updated `better-sqlite3` and `@types/better-sqlite3` peer dependency from `<8` to `<9`.
================================================
FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.19.md
================================================
# drizzle-orm-sqlite 0.12.0-beta.19
- Fix bug with running migrations. `Error: SqliteError: near "SCHEMA": syntax error` was fixed
================================================
FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.20.md
================================================
# drizzle-orm-sqlite 0.12.0-beta.20
- Fix bug with running migrations for async driver. `Error: SqliteError: near "SCHEMA": syntax error` was fixed
- Fix `Statement does not return any data - use run()` error, when no fields were provided to prepared statement
================================================
FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.21.md
================================================
# drizzle-orm-sqlite 0.12.0-beta.21
- Fixed `db.all` logic for all drivers.
- Allowed passing query builders to raw query execution methods.
- Optimized INSERT query generation for single values by skipping columns without values.
- Exposed `table` property from index config.
================================================
FILE: changelogs/drizzle-orm-sqlite/0.13.0.md
================================================
# drizzle-orm-sqlite 0.13.0
- Release 🎉
================================================
FILE: changelogs/drizzle-orm-sqlite/0.14.1.md
================================================
# drizzle-orm-sqlite 0.14.1
- Separated migrations functionality to a separate import:
```typescript
import { migrate } from 'drizzle-orm-sqlite/better-sqlite3/migrate';
```
- Replaced `await new SQLiteConnector(client).connect()` with `drizzle(client)`.
- `import { SQLiteConnector } from 'drizzle-orm-sqlite` -> `import { drizzle } from 'drizzle-orm-sqlite/better-sqlite3`.
================================================
FILE: changelogs/drizzle-orm-sqlite/0.14.2.md
================================================
# drizzle-orm-sqlite 0.14.2
- Bumped everything to 0.14.2
================================================
FILE: changelogs/drizzle-orm-sqlite/0.14.3.md
================================================
# drizzle-orm-sqlite 0.14.3
- `RangeError: The supplied SQL string contains more than one statement` error on migrations was fixed
Created `.exec()` method for session, that will run query without prepared statements
- Fix `defaultNow()` method query generation by adding missing `"()"`.
Previously default value was generated as
```sql
cast((julianday('now') - 2440587.5)*86400000 as integer)
```
Currently default value looks like
```sql
(cast((julianday('now') - 2440587.5)*86400000 as integer))
```
- Create test cases for both issues
================================================
FILE: changelogs/drizzle-orm-sqlite/0.14.4.md
================================================
# drizzle-orm-sqlite 0.14.4
- Fix adding autoincrement to `drizzle-kit` migrations
================================================
FILE: changelogs/drizzle-orm-sqlite/0.14.5.md
================================================
# drizzle-orm-sqlite 0.14.5
- Remove upper bound restriction from `@cloudflare/workers-types` peer dependency
- Fill author field in package.json
================================================
FILE: changelogs/drizzle-orm-sqlite/0.15.0.md
================================================
# drizzle-orm-sqlite 0.15.0
- Add composite PK's on table schema definition
#### Usage example
```typescript
const pkExample = sqliteTable('pk_example', {
id: integer('id'),
name: text('name').notNull(),
email: text('email').notNull(),
}, (table) => ({
compositePk: primaryKey(table.id, table.name)
}));
```
================================================
FILE: changelogs/drizzle-orm-sqlite/0.15.2.md
================================================
# drizzle-orm-sqlite 0.15.2
Internal release
================================================
FILE: changelogs/drizzle-orm-sqlite/0.15.3.md
================================================
# drizzle-orm-sqlite 0.15.3
Internal release
================================================
FILE: changelogs/drizzle-orm-sqlite/0.15.4.md
================================================
# drizzle-orm-sqlite 0.15.4
- Implemented [sql.js](https://github.com/sql-js/sql.js/) driver support (allows you to use SQLite in the browser)
================================================
FILE: changelogs/drizzle-orm-sqlite/0.16.0.md
================================================
# drizzle-orm-sqlite 0.16.0
- Bump all packages to 0.16.0
================================================
FILE: changelogs/drizzle-orm-sqlite/0.16.1.md
================================================
# drizzle-orm-sqlite 0.16.1
- Fix peer dependency error for >=0.16 drizzle packages
================================================
FILE: changelogs/drizzle-seed/0.1.1.md
================================================
# Initial Release
> [!NOTE]
> `drizzle-seed` can only be used with `drizzle-orm@0.36.4` or higher. Versions lower than this may work at runtime but could have type issues and identity column issues, as this patch was introduced in `drizzle-orm@0.36.4`
## Full Reference
The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview)
## Basic Usage
In this example we will create 10 users with random names and ids
```ts {12}
import { pgTable, integer, text } from "drizzle-orm/pg-core";
import { drizzle } from "drizzle-orm/node-postgres";
import { seed } from "drizzle-seed";
const users = pgTable("users", {
id: integer().primaryKey(),
name: text().notNull(),
});
async function main() {
const db = drizzle(process.env.DATABASE_URL!);
await seed(db, { users });
}
main();
```
## Options
**`count`**
By default, the `seed` function will create 10 entities.
However, if you need more for your tests, you can specify this in the seed options object
```ts
await seed(db, schema, { count: 1000 });
```
**`seed`**
If you need a seed to generate a different set of values for all subsequent runs, you can define a different number
in the `seed` option. Any new number will generate a unique set of values
```ts
await seed(db, schema, { seed: 12345 });
```
The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview)
================================================
FILE: changelogs/drizzle-seed/0.1.2.md
================================================
- Fixed: [[BUG]: drizzle-seed reset fails without @electric-sql/pglite installed](https://github.com/drizzle-team/drizzle-orm/issues/3603)
- Fixed: [[BUG]: TypeScript type error in drizzle-seed with schema passed to drizzle in IDE](https://github.com/drizzle-team/drizzle-orm/issues/3599)
================================================
FILE: changelogs/drizzle-seed/0.1.3.md
================================================
## Bug fixes
- https://github.com/drizzle-team/drizzle-orm/issues/3644
- seeding a table with columns that have .default(sql``) will result in an error
## Features
- added support for postgres uuid columns
Example
```ts
import { pgTable, uuid } from "drizzle-orm/pg-core";
import { drizzle } from "drizzle-orm/node-postgres";
import { seed } from "drizzle-seed";
const users = pgTable("users", {
uuid: uuid("uuid"),
});
async function main() {
const db = drizzle(process.env.DATABASE_URL!);
// You can let it seed automatically
// await seed(db, { users });
// Alternatively, you can manually specify the generator in refine.
await seed(db, { users }, { count: 1000 }).refine((funcs) => ({
users: {
columns: {
uuid: funcs.uuid(),
},
},
}));
}
main();
```
##
- added support for postgres array columns
Example
```ts
import { pgTable, integer, text, varchar } from "drizzle-orm/pg-core";
import { drizzle } from "drizzle-orm/node-postgres";
import { seed } from "drizzle-seed";
const users = pgTable("users", {
id: integer().primaryKey(),
name: text().notNull(),
phone_numbers: varchar({ length: 256 }).array(),
});
```
You can specify the `arraySize` parameter in generator options, like `funcs.phoneNumber({ arraySize: 3 })`, to generate 1D arrays.
```ts
async function main() {
const db = drizzle(process.env.DATABASE_URL!);
await seed(db, { users }, { count: 1000 }).refine((funcs) => ({
users: {
columns: {
phone_numbers: funcs.phoneNumber({ arraySize: 3 }),
},
},
}));
}
main();
```
Alternatively, you can let it seed automatically, and it will handle arrays of any dimension.
```ts
async function main() {
const db = drizzle(process.env.DATABASE_URL!);
await seed(db, { users });
}
main();
```
##
- added support for cyclic tables
You can now seed tables with cyclic relations.
```ts
import type { AnyPgColumn } from "drizzle-orm/pg-core";
import {
foreignKey,
integer,
pgTable,
serial,
varchar,
} from "drizzle-orm/pg-core";
export const modelTable = pgTable(
"model",
{
id: serial().primaryKey(),
name: varchar().notNull(),
defaultImageId: integer(),
},
(t) => [
foreignKey({
columns: [t.defaultImageId],
foreignColumns: [modelImageTable.id],
}),
]
);
export const modelImageTable = pgTable("model_image", {
id: serial().primaryKey(),
url: varchar().notNull(),
caption: varchar(),
modelId: integer()
.notNull()
.references((): AnyPgColumn => modelTable.id),
});
async function main() {
const db = drizzle(process.env.DATABASE_URL!);
await seed(db, { modelTable, modelImageTable });
}
main();
```
================================================
FILE: changelogs/drizzle-seed/0.2.1.md
================================================
## API updates
We are introducing a new parameter, `version`, to the `seed` function options. This parameter, which controls generator versioning, has been added to make it easier to update deterministic generators in the future. Since values should remain consistent after each regeneration, it is crucial to provide a well-designed API for gradual updates
```ts
await seed(db, schema, { version: '2' });
```
#### Example:
> This is not an actual API change; it is just an example of how we will proceed with `drizzle-seed` versioning
For example, `lastName` generator was changed, and new version, `V2`, of this generator became available.
Later, `firstName` generator was changed, making `V3` version of this generator available.
| | `V1` | `V2` | `V3(latest)` |
| :--------------: | :--------------: | :-------------: | :--------------: |
| **LastNameGen** | `LastNameGenV1` | `LastNameGenV2` | |
| **FirstNameGen** | `FirstNameGenV1` | | `FirstNameGenV3` |
##### Use the `firstName` generator of version 3 and the `lastName` generator of version 2
```ts
await seed(db, schema);
```
If you are not ready to use latest generator version right away, you can specify max version to use
##### Use the `firstName` generator of version 1 and the `lastName` generator of version 2
```ts
await seed(db, schema, { version: '2' });
```
##### Use the `firstName` generator of version 1 and the `lastName` generator of version 1.
```ts
await seed(db, schema, { version: '1' });
```
Each update with breaking changes for generators will be documented on our docs and in release notes, explaining which version you should use, if you are not ready to upgrade the way generators works
## Breaking changes
### `interval` unique generator was changed and upgraded to v2
```ts
await seed(db, { table }).refine((f) => ({
table: {
columns: {
// this function usage will output different values with the same `seed` number from previous version
column1: f.interval({ isUnique: true }),
}
}
}))
```
**Reason for upgrade**
An older version of the generator could produce intervals like `1 minute 60 seconds` and `2 minutes 0 seconds`, treating them as distinct intervals.
However, when the `1 minute 60 seconds` interval is inserted into a PostgreSQL database, it is automatically converted to `2 minutes 0 seconds`. As a result, attempting to insert the `2 minutes 0 seconds` interval into a unique column afterwards will cause an error
**Usage**
```ts
await seed(db, schema);
// or explicit
await seed(db, schema, { version: '2' });
```
**Switch to the old version**
```ts
await seed(db, schema, { version: '1' });
```
### `string` generators were changed and upgraded to v2
```ts
await seed(db, { table }).refine((f) => ({
table: {
columns: {
// this function will output different values with the same `seed` number from previous version
column1: f.string(),
}
}
}))
```
**Reason for upgrade**
Ability to generate a unique string based on the length of the text column (e.g., `varchar(20)`)
#### PostgreSQL changes
Default generators for `text`, `varchar`, `char` will output different values with the same `seed` number from previous version.
```ts
// schema.ts
import * as p from 'drizzle-orm/pg-core'
export const table = p.pgTable('table', {
column1: p.text(),
column2: p.varchar(),
column3: p.char()
});
// index.ts
...
// this will be affected with new changes
await seed(db, { table });
```
**Switch to the old version**
```ts
await seed(db, schema, { version: '1' });
```
#### MySQL changes
Default generators for `text`, `char`, `varchar`, `binary`, `varbinary` will output different values with the same `seed` number.
```ts
// schema.ts
import * as p from 'drizzle-orm/mysql-core'
export const table = p.mysqlTable('table', {
column1: p.text(),
column2: p.char(),
column3: p.varchar({ length: 256 }),
column4: p.binary(),
column5: p.varbinary({ length: 256 }),
});
// index.ts
...
// this will be affected with new changes
await seed(db, {table})
```
**Switch to the old version**
```ts
await seed(db, schema, { version: '1' });
```
#### SQLite changes
Default generators for `text`, `numeric`, `blob`, `blobbuffer` will output different values with the same `seed` number.
```ts
// schema.ts
import * as p from 'drizzle-orm/sqlite-core'
export const table = p.sqliteTable('table', {
column1: p.text(),
column2: p.numeric(),
column3: p.blob({ mode:'buffer' }),
column4: p.blob(),
});
// index.ts
...
// this will be affected with new changes
await seed(db, { table })
```
## Bug fixes
- Seeding a table with a foreign key referencing another table, without including the second table in the schema, will cause the seeding process to get stuck
- [[BUG]: seeding postgresql char column doesn't respect length option](https://github.com/drizzle-team/drizzle-orm/issues/3774)
================================================
FILE: changelogs/drizzle-seed/0.3.0.md
================================================
# New features
## Drizzle Relations support
The `seed` function can now accept Drizzle Relations objects and treat them as foreign key constraints
```ts
// schema.ts
import { integer, serial, text, pgTable } from 'drizzle-orm/pg-core';
import { relations } from 'drizzle-orm';
export const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
});
export const usersRelations = relations(users, ({ many }) => ({
posts: many(posts),
}));
export const posts = pgTable('posts', {
id: serial('id').primaryKey(),
content: text('content').notNull(),
authorId: integer('author_id').notNull(),
});
export const postsRelations = relations(posts, ({ one }) => ({
author: one(users, { fields: [posts.authorId], references: [users.id] }),
}));
```
```ts
// index.ts
import { seed } from "drizzle-seed";
import * as schema from './schema.ts'
async function main() {
const db = drizzle(process.env.DATABASE_URL!);
await seed(db, schema);
}
main();
```
================================================
FILE: changelogs/drizzle-seed/0.3.1.md
================================================
## Bug fixes
- Combining a reference in a table schema (foreign key constraint) with a one-to-many relation for the same two tables defined in the constraint causes the seeder to duplicate these relations and enter an infinite loop.
Example:
```ts
// schema.ts
import { integer, pgTable, text } from "drizzle-orm/pg-core";
import { relations } from "drizzle-orm/relations";
export const users = pgTable("users", {
id: integer().primaryKey(),
name: text(),
email: text(),
});
export const posts = pgTable("posts", {
id: integer().primaryKey(),
content: text(),
userId: integer().references(() => users.id),
});
export const postsRelation = relations(posts, ({ one }) => ({
user: one(users, {
fields: [posts.userId],
references: [users.id],
}),
}));
```
Now, seeding with the schema above will trigger a warning.
```
You are providing a one-to-many relation between the 'users' and 'posts' tables,
while the 'posts' table object already has foreign key constraint in the schema referencing 'users' table.
In this case, the foreign key constraint will be used.
```
================================================
FILE: changelogs/drizzle-typebox/0.1.0.md
================================================
# drizzle-typebox 0.1.0
- Initial release
================================================
FILE: changelogs/drizzle-typebox/0.1.1.md
================================================
- 🐛 Fixed imports in ESM projects
================================================
FILE: changelogs/drizzle-typebox/0.2.0.md
================================================
This version fully updates `drizzle-typebox` integration and makes sure it's compatible with newer typebox versions
# Breaking Changes
> You must also have Drizzle ORM v0.38.0 or greater and Typebox v0.34.8 or greater installed.
- When refining a field, if a schema is provided instead of a callback function, it will ignore the field's nullability and optional status.
- Some data types have more specific schemas for improved validation
# Improvements
Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release:
- Output for all packages are now unminified, makes exploring the compiled code easier when published to npm.
- Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed.
- Slight performance gain. To determine the column data type we used the is function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up every quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property.
# New features
- `createSelectSchema` function now also accepts views and enums.
```ts
import { pgEnum } from 'drizzle-orm/pg-core';
import { createSelectSchema } from 'drizzle-typebox';
import { Value } from '@sinclair/typebox/value';
const roles = pgEnum('roles', ['admin', 'basic']);
const rolesSchema = createSelectSchema(roles);
const parsed: 'admin' | 'basic' = Value.Parse(rolesSchema, ...);
const usersView = pgView('users_view').as((qb) => qb.select().from(users).where(gt(users.age, 18)));
const usersViewSchema = createSelectSchema(usersView);
const parsed: { id: number; name: string; age: number } = Value.Parse(usersViewSchema, ...);
```
- New function: `createUpdateSchema`, for use in updating queries.
```ts copy
import { pgTable, text, integer } from 'drizzle-orm/pg-core';
import { createUpdateSchema } from 'drizzle-typebox';
import { Value } from '@sinclair/typebox/value';
const users = pgTable('users', {
id: integer().generatedAlwaysAsIdentity().primaryKey(),
name: text().notNull(),
age: integer().notNull()
});
const userUpdateSchema = createUpdateSchema(users);
const user = { id: 5, name: 'John' };
const parsed: { name?: string | undefined, age?: number | undefined } = Value.Parse(userUpdateSchema, user); // Error: `id` is a generated column, it can't be updated
const user = { age: 35 };
const parsed: { name?: string | undefined, age?: number | undefined } = Value.Parse(userUpdateSchema, user); // Will parse successfully
await db.update(users).set(parsed).where(eq(users.name, 'Jane'));
```
- New function: `createSchemaFactory`, to provide more advanced options and to avoid bloating the parameters of the other schema functions
```ts copy
import { pgTable, text, integer } from 'drizzle-orm/pg-core';
import { createSchemaFactory } from 'drizzle-typebox';
import { t } from 'elysia'; // Extended Typebox instance
const users = pgTable('users', {
id: integer().generatedAlwaysAsIdentity().primaryKey(),
name: text().notNull(),
age: integer().notNull()
});
const { createInsertSchema } = createSchemaFactory({ typeboxInstance: t });
const userInsertSchema = createInsertSchema(users, {
// We can now use the extended instance
name: (schema) => t.Number({ ...schema }, { error: '`name` must be a string' })
});
```
- Full support for PG arrays
```ts
pg.dataType().array(...);
// Schema
Type.Array(baseDataTypeSchema, { minItems: size, maxItems: size });
```
================================================
FILE: changelogs/drizzle-typebox/0.2.1.md
================================================
# Added support for SingleStore dialect
```ts
import { singlestoreTable, text, int } from 'drizzle-orm/singlestore-core';
import { createSelectSchema } from 'drizzle-typebox';
import { Value } from '@sinclair/typebox/value';
const users = singlestoreTable('users', {
id: int().primaryKey(),
name: text().notNull(),
age: int().notNull()
});
const userSelectSchema = createSelectSchema(users);
const rows = await db.select({ id: users.id, name: users.name }).from(users).limit(1);
const parsed: { id: number; name: string; age: number } = Value.Parse(userSelectSchema, rows[0]); // Error: `age` is not returned in the above query
const rows = await db.select().from(users).limit(1);
const parsed: { id: number; name: string; age: number } = Value.Parse(userSelectSchema, rows[0]); // Will parse successfully
```
# Bug fixes
- [[BUG]: drizzle-typebox infers integer() as TString](https://github.com/drizzle-team/drizzle-orm/issues/3756)
================================================
FILE: changelogs/drizzle-typebox/0.3.0.md
================================================
# Bugs fixed and GitHub issues closed
- [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732)
- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869)
================================================
FILE: changelogs/drizzle-typebox/0.3.1.md
================================================
- Exports all types, including internal ones to avoid type issues.
- Properly handle infinitely recursive types in custom JSON column types.
thanks @L-Mario564
================================================
FILE: changelogs/drizzle-typebox/0.3.2.md
================================================
- Functions `getColumns`, `handleColumns` and `handleEnum` were exported from `drizzle-typebox`
================================================
FILE: changelogs/drizzle-typebox/0.3.3.md
================================================
- TS language server performance improvements
================================================
FILE: changelogs/drizzle-valibot/0.1.0.md
================================================
# drizzle-valibot 0.1.0
- Initial release
================================================
FILE: changelogs/drizzle-valibot/0.1.1.md
================================================
- 🐛 Fixed imports in ESM projects
================================================
FILE: changelogs/drizzle-valibot/0.2.0.md
================================================
Use updated types introduced in valibot `0.20.0`:
- `enumType` -> `picklist`
- `nullType` -> `null_`
Minimum supported valibot version is now `0.20.0`.
================================================
FILE: changelogs/drizzle-valibot/0.3.0.md
================================================
This version fully updates `drizzle-valibot` integration and makes sure it's compatible with newer valibot versions
# Breaking Changes
> You must also have Drizzle ORM v0.38.0 or greater and Valibot v1.0.0-beta.7 or greater installed.
- When refining a field, if a schema is provided instead of a callback function, it will ignore the field's nullability and optional status.
- Some data types have more specific schemas for improved validation
# Improvements
Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release:
- Output for all packages are now unminified, makes exploring the compiled code easier when published to npm.
- Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed.
- Slight performance gain. To determine the column data type we used the is function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up very quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property.
- Some changes had to be made at the type level in the ORM package for better compatibility with drizzle-valibot.
# New features
- `createSelectSchema` function now also accepts views and enums.
```ts copy
import { pgEnum } from 'drizzle-orm/pg-core';
import { createSelectSchema } from 'drizzle-valibot';
import { parse } from 'valibot';
const roles = pgEnum('roles', ['admin', 'basic']);
const rolesSchema = createSelectSchema(roles);
const parsed: 'admin' | 'basic' = parse(rolesSchema, ...);
const usersView = pgView('users_view').as((qb) => qb.select().from(users).where(gt(users.age, 18)));
const usersViewSchema = createSelectSchema(usersView);
const parsed: { id: number; name: string; age: number } = parse(usersViewSchema, ...);
```
- New function: `createUpdateSchema`, for use in updating queries.
```ts copy
import { pgTable, text, integer } from 'drizzle-orm/pg-core';
import { createUpdateSchema } from 'drizzle-valibot';
import { parse } from 'valibot';
const users = pgTable('users', {
id: integer().generatedAlwaysAsIdentity().primaryKey(),
name: text().notNull(),
age: integer().notNull()
});
const userUpdateSchema = createUpdateSchema(users);
const user = { id: 5, name: 'John' };
const parsed: { name?: string | undefined, age?: number | undefined } = parse(userUpdateSchema, user); // Error: `id` is a generated column, it can't be updated
const user = { age: 35 };
const parsed: { name?: string | undefined, age?: number | undefined } = parse(userUpdateSchema, user); // Will parse successfully
await db.update(users).set(parsed).where(eq(users.name, 'Jane'));
```
- Full support for PG arrays
```ts
pg.dataType().array(...);
// Schema
v.pipe(v.array(baseDataTypeSchema), v.length(size));
```
================================================
FILE: changelogs/drizzle-valibot/0.3.1.md
================================================
# Added support for SingleStore dialect
```ts
import { singlestoreTable, text, int } from 'drizzle-orm/singlestore-core';
import { createSelectSchema } from 'drizzle-valibot';
import { parse } from 'valibot';
const users = singlestoreTable('users', {
id: int().primaryKey(),
name: text().notNull(),
age: int().notNull()
});
const userSelectSchema = createSelectSchema(users);
const rows = await db.select({ id: users.id, name: users.name }).from(users).limit(1);
const parsed: { id: number; name: string; age: number } = parse(userSelectSchema, rows[0]); // Error: `age` is not returned in the above query
const rows = await db.select().from(users).limit(1);
const parsed: { id: number; name: string; age: number } = parse(userSelectSchema, rows[0]); // Will parse successfully
```
# Bug fixes
- [[BUG]: drizzle-valibot throws Type instantiation is excessively deep and possibly infinite. for refinements](https://github.com/drizzle-team/drizzle-orm/issues/3751)
================================================
FILE: changelogs/drizzle-valibot/0.4.0.md
================================================
# Bugs fixed and GitHub issues closed
- [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732)
- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869)
================================================
FILE: changelogs/drizzle-valibot/0.4.1.md
================================================
- Exports all types, including internal ones to avoid type issues.
- Properly handle infinitely recursive types in custom JSON column types.
thanks @L-Mario564
================================================
FILE: changelogs/drizzle-valibot/0.4.2.md
================================================
- TS language server performance improvements
================================================
FILE: changelogs/drizzle-zod/0.1.0.md
================================================
# drizzle-zod 0.1.0
- Initial release
- Added insert schema generation for Postgres
================================================
FILE: changelogs/drizzle-zod/0.1.1.md
================================================
# drizzle-zod 0.1.1
Internal release
================================================
FILE: changelogs/drizzle-zod/0.1.2.md
================================================
# drizzle-zod 0.1.2
- Fix peer dependency error for >=0.16 drizzle packages
================================================
FILE: changelogs/drizzle-zod/0.1.3.md
================================================
# drizzle-zod 0.1.3
- Fix import for 0.17 drizzle-orm
================================================
FILE: changelogs/drizzle-zod/0.1.4.md
================================================
- 🐛 Updated logic for drizzle-orm 0.23.2
================================================
FILE: changelogs/drizzle-zod/0.2.0.md
================================================
- 🎉 Added select schema support
- 🎉 Added SQLite support
- ❗ Changed imports from `drizzle-zod/pg` to `drizzle-zod` for all dialects
================================================
FILE: changelogs/drizzle-zod/0.2.1.md
================================================
- 🐛 Fix insert schemas generation
================================================
FILE: changelogs/drizzle-zod/0.3.0.md
================================================
- 🎉 Added MySQL support
================================================
FILE: changelogs/drizzle-zod/0.3.1.md
================================================
- Fix drizzle-zod default refine type in [479](https://github.com/drizzle-team/drizzle-orm/pull/479) - thanks @hugo-clemente ❤️
================================================
FILE: changelogs/drizzle-zod/0.3.2.md
================================================
- 🐛 Fixed a bug in schema types inference
================================================
FILE: changelogs/drizzle-zod/0.4.0.md
================================================
# ESM support
- 🎉 Added ESM support! You can now use `drizzle-zod` in both ESM and CJS environments.
- 🎉 Added code minification and source maps.
================================================
FILE: changelogs/drizzle-zod/0.4.1.md
================================================
- 🐛 Add "exports" field to package.json
================================================
FILE: changelogs/drizzle-zod/0.4.2.md
================================================
- 🐛 Fixed autoincrement columns not being optional in drizzle-zod (#652)
- 🐛 Added length check for text fields in drizzle-zod (#658)
================================================
FILE: changelogs/drizzle-zod/0.4.3.md
================================================
- 🎉 Added PgDateString to drizzle-zod (#665)
================================================
FILE: changelogs/drizzle-zod/0.4.4.md
================================================
- Fixed drizzle-zod not enforcing string lengths (#691) by @TiltedToast
================================================
FILE: changelogs/drizzle-zod/0.5.0.md
================================================
- Added compatibility with Drizzle 0.28.0
================================================
FILE: changelogs/drizzle-zod/0.5.1.md
================================================
- 🐛 Fixed imports in ESM projects
================================================
FILE: changelogs/drizzle-zod/0.6.0.md
================================================
This version fully updates `drizzle-zod` integration and makes sure it's compatible with newer zod versions
# Breaking Changes
> You must also have Drizzle ORM v0.38.0 or greater and Zod v3.0.0 or greater installed.
- When refining a field, if a schema is provided instead of a callback function, it will ignore the field's nullability and optional status.
- Some data types have more specific schemas for improved validation
# Improvements
Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release:
- Output for all packages are now unminified, makes exploring the compiled code easier when published to npm.
- Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed.
- Slight performance gain. To determine the column data type we used the is function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up very quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property.
# New features
- `createSelectSchema` function now also accepts views and enums.
```ts copy
import { pgEnum } from 'drizzle-orm/pg-core';
import { createSelectSchema } from 'drizzle-zod';
const roles = pgEnum('roles', ['admin', 'basic']);
const rolesSchema = createSelectSchema(roles);
const parsed: 'admin' | 'basic' = rolesSchema.parse(...);
const usersView = pgView('users_view').as((qb) => qb.select().from(users).where(gt(users.age, 18)));
const usersViewSchema = createSelectSchema(usersView);
const parsed: { id: number; name: string; age: number } = usersViewSchema.parse(...);
```
- New function: `createUpdateSchema`, for use in updating queries.
```ts copy
import { pgTable, text, integer } from 'drizzle-orm/pg-core';
import { createUpdateSchema } from 'drizzle-zod';
const users = pgTable('users', {
id: integer().generatedAlwaysAsIdentity().primaryKey(),
name: text().notNull(),
age: integer().notNull()
});
const userUpdateSchema = createUpdateSchema(users);
const user = { id: 5, name: 'John' };
const parsed: { name?: string | undefined, age?: number | undefined } = userUpdateSchema.parse(user); // Error: `id` is a generated column, it can't be updated
const user = { age: 35 };
const parsed: { name?: string | undefined, age?: number | undefined } = userUpdateSchema.parse(user); // Will parse successfully
await db.update(users).set(parsed).where(eq(users.name, 'Jane'));
```
- New function: `createSchemaFactory`, to provide more advanced options and to avoid bloating the parameters of the other schema functions
```ts copy
import { pgTable, text, integer } from 'drizzle-orm/pg-core';
import { createSchemaFactory } from 'drizzle-zod';
import { z } from '@hono/zod-openapi'; // Extended Zod instance
const users = pgTable('users', {
id: integer().generatedAlwaysAsIdentity().primaryKey(),
name: text().notNull(),
age: integer().notNull()
});
const { createInsertSchema } = createSchemaFactory({ zodInstance: z });
const userInsertSchema = createInsertSchema(users, {
// We can now use the extended instance
name: (schema) => schema.openapi({ example: 'John' })
});
```
- Full support for PG arrays
```ts
pg.dataType().array(...);
// Schema
z.array(baseDataTypeSchema).length(size);
```
================================================
FILE: changelogs/drizzle-zod/0.6.1.md
================================================
# New Features
## Added support for SingleStore dialect
```ts
import { singlestoreTable, text, int } from 'drizzle-orm/singlestore-core';
import { createSelectSchema } from 'drizzle-zod';
const users = singlestoreTable('users', {
id: int().primaryKey(),
name: text().notNull(),
age: int().notNull()
});
const userSelectSchema = createSelectSchema(users);
const rows = await db.select({ id: users.id, name: users.name }).from(users).limit(1);
const parsed: { id: number; name: string; age: number } = userSelectSchema.parse(rows[0]); // Error: `age` is not returned in the above query
const rows = await db.select().from(users).limit(1);
const parsed: { id: number; name: string; age: number } = userSelectSchema.parse(rows[0]); // Will parse successfully
```
# Bug fixes
- [[BUG]: refining schema using createSelectSchema is not working with drizzle-kit 0.6.0](https://github.com/drizzle-team/drizzle-orm/issues/3735)
- [[BUG]: drizzle-zod inferring types incorrectly](https://github.com/drizzle-team/drizzle-orm/issues/3734)
================================================
FILE: changelogs/drizzle-zod/0.7.0.md
================================================
# Improvements
## Added type coercion support
**Use case: Type coercion**
```ts copy
import { pgTable, timestamp } from 'drizzle-orm/pg-core';
import { createSchemaFactory } from 'drizzle-zod';
import { z } from 'zod';
const users = pgTable('users', {
...,
createdAt: timestamp().notNull()
});
const { createInsertSchema } = createSchemaFactory({
// This configuration will only coerce dates. Set `coerce` to `true` to coerce all data types or specify others
coerce: {
date: true
}
});
const userInsertSchema = createInsertSchema(users);
// The above is the same as this:
const userInsertSchema = z.object({
...,
createdAt: z.coerce.date()
});
```
# Bugs fixed and GitHub issues closed
- [[BUG]: Cannot use schema.coerce on refining drizzle-zod types](https://github.com/drizzle-team/drizzle-orm/issues/3842)
- [[FEATURE]: Type Coercion in drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/776)
- [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732)
- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869)
================================================
FILE: changelogs/drizzle-zod/0.7.1.md
================================================
### Bug fixes
- [[BUG]: createInsertSchema from drizzle-zod@0.6.1 does not infer types correctly but returns unknown for every value](https://github.com/drizzle-team/drizzle-orm/issues/3907)
- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869)
thanks @L-Mario564
================================================
FILE: changelogs/drizzle-zod/0.8.0.md
================================================
- Support for Zod v4: Starting with this release, `drizzle-zod` now requires Zod v3.25 or later
================================================
FILE: changelogs/drizzle-zod/0.8.1.md
================================================
- Support for Zod v4: Starting with this release, `drizzle-zod` now requires Zod v3.25.1 or later
This version was released to resolve several compatibility issues with the `ZodObject` type, which were fixed in `drizzle-zod@0.8.1`, so version `0.8.0` can be skipped
================================================
FILE: changelogs/drizzle-zod/0.8.2.md
================================================
- [[BUG]: drizzle-zod: incorrect inferred types for columns .generatedAlwaysAsIdentity()](https://github.com/drizzle-team/drizzle-orm/issues/4553)
================================================
FILE: changelogs/drizzle-zod/0.8.3.md
================================================
- Update peerDeps for zod
================================================
FILE: changelogs/eslint-plugin-drizzle/0.2.0.md
================================================
# eslint-plugin-drizzle 0.2.0
- Initial release
- 2 rules available
================================================
FILE: changelogs/eslint-plugin-drizzle/0.2.1.md
================================================
# eslint-plugin-drizzle 0.2.1
- Update README.md
- Change error text message
================================================
FILE: changelogs/eslint-plugin-drizzle/0.2.2.md
================================================
# eslint-plugin-drizzle 0.2.2
- fix: Correct detection of `drizzleObjectName` when it's a nested object
================================================
FILE: changelogs/eslint-plugin-drizzle/0.2.3.md
================================================
# eslint-plugin-drizzle 0.2.3
- Added better context to the suggestion in the error message
- fix: Correct detection of `drizzleObjectName` when it's retrieved from or is a function
- chore: Refactored duplicate code in `utils/options.ts` into `isDrizzleObjName` function
================================================
FILE: docs/custom-types.lite.md
================================================
# Common way of defining custom types
> [!NOTE]
> For more advanced documentation about defining custom data types in PostgreSQL and MySQL, please check [`custom-types.md`](custom-types.md).
## Examples
The best way to see how a `customType` definition works is to check how existing data types in Postgres and MySQL could be defined using the `customType` function from Drizzle ORM
### Postgres Data Types using `node-postgres` driver
---
#### **Serial**
```typescript
const customSerial = customType<{ data: number; notNull: true; default: true }>(
{
dataType() {
return 'serial';
},
},
);
```
#### **Text**
```typescript
const customText = customType<{ data: string }>({
dataType() {
return 'text';
},
});
```
#### **Boolean**
```typescript
const customBoolean = customType<{ data: boolean }>({
dataType() {
return 'boolean';
},
});
```
#### **Jsonb**
```typescript
const customJsonb = (name: string) =>
customType<{ data: TData; driverData: string }>({
dataType() {
return 'jsonb';
},
toDriver(value: TData): string {
return JSON.stringify(value);
},
})(name);
```
#### **Timestamp**
```typescript
const customTimestamp = customType<
{
data: Date;
driverData: string;
config: { withTimezone: boolean; precision?: number };
}
>({
dataType(config) {
const precision = typeof config.precision !== 'undefined'
? ` (${config.precision})`
: '';
return `timestamp${precision}${
config.withTimezone ? ' with time zone' : ''
}`;
},
fromDriver(value: string): Date {
return new Date(value);
},
});
```
#### Usage for all types will be same as defined functions in Drizzle ORM
```typescript
const usersTable = pgTable('users', {
id: customSerial('id').primaryKey(),
name: customText('name').notNull(),
verified: customBoolean('verified').notNull().default(false),
jsonb: customJsonb('jsonb'),
createdAt: customTimestamp('created_at', { withTimezone: true }).notNull()
.default(sql`now()`),
});
```
### MySql Data Types using `mysql2` driver
---
#### **Serial**
```typescript
const customSerial = customType<{ data: number; notNull: true; default: true }>(
{
dataType() {
return 'serial';
},
},
);
```
#### **Text**
```typescript
const customText = customType<{ data: string }>({
dataType() {
return 'text';
},
});
```
#### **Boolean**
```typescript
const customBoolean = customType<{ data: boolean }>({
dataType() {
return 'boolean';
},
fromDriver(value) {
if (typeof value === 'boolean') {
return value;
}
return value === 1;
},
});
```
#### **Json**
```typescript
const customJson = (name: string) =>
customType<{ data: TData; driverData: string }>({
dataType() {
return 'json';
},
toDriver(value: TData): string {
return JSON.stringify(value);
},
})(name);
```
#### **Timestamp**
```typescript
const customTimestamp = customType<
{ data: Date; driverData: string; config: { fsp: number } }
>({
dataType(config) {
const precision = typeof config.fsp !== 'undefined'
? ` (${config.fsp})`
: '';
return `timestamp${precision}`;
},
fromDriver(value: string): Date {
return new Date(value);
},
});
```
#### Usage for all types will be same as defined functions in Drizzle ORM
```typescript
const usersTable = mysqlTable('userstest', {
id: customSerial('id').primaryKey(),
name: customText('name').notNull(),
verified: customBoolean('verified').notNull().default(false),
jsonb: customJson('jsonb'),
createdAt: customTimestamp('created_at', { fsp: 2 }).notNull().default(
sql`now()`,
),
});
```
You can check ts-doc for types and param definition
````typescript
export type CustomTypeValues = {
/**
* Required type for custom column, that will infer proper type model
*
* Examples:
*
* If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar`
*
* If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer`
*/
data: unknown;
/**
* Type helper, that represents what type database driver is accepting for specific database data type
*/
driverData?: unknown;
/**
* What config type should be used for {@link CustomTypeParams} `dataType` generation
*/
config?: unknown;
/**
* Whether the config argument should be required or not
* @default false
*/
configRequired?: boolean;
/**
* If your custom data type should be notNull by default you can use `notNull: true`
*
* @example
* const customSerial = customType<{ data: number, notNull: true, default: true }>({
* dataType() {
* return 'serial';
* },
* });
*/
notNull?: boolean;
/**
* If your custom data type has default you can use `default: true`
*
* @example
* const customSerial = customType<{ data: number, notNull: true, default: true }>({
* dataType() {
* return 'serial';
* },
* });
*/
default?: boolean;
};
export interface CustomTypeParams<T extends CustomTypeValues> {
/**
* Database data type string representation, that is used for migrations
* @example
* ```
* `jsonb`, `text`
* ```
*
* If database data type needs additional params you can use them from `config` param
* @example
* ```
* `varchar(256)`, `numeric(2,3)`
* ```
*
* To make `config` be of specific type please use config generic in {@link CustomTypeValues}
*
* @example
* Usage example
* ```
* dataType() {
* return 'boolean';
* },
* ```
* Or
* ```
* dataType(config) {
* return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`;
* }
* ```
*/
dataType: (config: T['config'] | (Equal<T['configRequired'], true> extends true ? never : undefined)) => string;
/**
* Optional mapping function, between user input and driver
* @example
* For example, when using jsonb we need to map JS/TS object to string before writing to database
* ```
* toDriver(value: TData): string {
* return JSON.stringify(value);
* }
* ```
*/
toDriver?: (value: T['data']) => T['driverData'] | SQL;
/**
* Optional mapping function, that is responsible for data mapping from database to JS/TS code
* @example
* For example, when using timestamp we need to map string Date representation to JS Date
* ```
* fromDriver(value: string): Date {
* return new Date(value);
* },
* ```
*/
fromDriver?: (value: T['driverData']) => T['data'];
}
````
================================================
FILE: docs/custom-types.md
================================================
# How to define custom types
Drizzle ORM has a big set of predefined column types for different SQL databases. However, there are still additional types that Drizzle ORM does not support (yet), such as native pg types or extension types
Here are some instructions on how to create and use your own types with Drizzle ORM
---
## Abstract view on column builder pattern in Drizzle ORM
Each type creation should use 2 classes:
- `ColumnBuilder` - class, that is responsible for generating whole set of needed fields for column creation
- `Column` - class, that is representing Columns itself, that is used in query generation, migration mapping, etc.
Each module has its own class, representing `ColumnBuilder` or `Column`:
- For `pg` -> `PgColumnBuilder` and `PgColumn`
- For `mysql` -> `MySqlColumnBuilder` and `MySqlColumn`
- For `sqlite` -> `SQLiteColumnBuilder` and `SQLiteColumn`
### Builder class explanation - (postgresql text data type example)
- Builder class is responsible for storing TS return type for specific database datatype and override build function to return ready to use column in table
- `TData` - extends return type for column. Current example will infer string type for current datatype used in schema definition
```typescript
export class PgTextBuilder
extends PgColumnBuilder<
ColumnBuilderConfig<{ data: TData; driverParam: string }>
>
{
build(
table: AnyPgTable<{ name: TTableName }>,
): PgText {
return new PgText(table, this.config);
}
}
```
> [!WARNING]
> `$pgColumnBuilderBrand` should be changed and be equal to class name for new data type builder
### Column class explanation - (postgresql text data type example)
---
Column class has set of types/functions, that could be overridden to get needed behavior for custom type
- `TData` - extends return type for column. Current example will infer string type for current datatype used in schema definition
- `getSQLType()` - function, that shows datatype name in database and will be used in migration generation
- `mapFromDriverValue()` - interceptor between database and select query execution. If you want to modify/map/change value for specific data type, it could be done here
#### Usage example for int type
```typescript
override mapFromDriverValue(value: number | string): number {
if (typeof value === 'string') {
return parseInt(value);
}
return value;
}
```
- `mapToDriverValue` - interceptor between user input for insert/update queries and database query. If you want to modify/map/change value for specific data type, it could be done here
#### Usage example for jsonb type
```typescript
override mapToDriverValue(value: TData): string {
return JSON.stringify(value);
}
```
#### Column class example
```typescript
export class PgText
extends PgColumn<ColumnConfig<{ tableName: TTableName; data: TData; driverParam: string }>> {
constructor(table: AnyPgTable<{ name: TTableName }>, builder: PgTextBuilder['config']) {
super(table, builder);
}
getSQLType(): string {
return 'text';
}
override mapFromDriverValue(value: string): TData {
return value as TData
}
override mapToDriverValue(value: TData): string {
return value
}
}
```
> [!WARNING]
> `$pgColumnBrand` should be changed and be equal to class name for new data type
### Full text data type for PostgreSQL example
For more postgres data type examples you could check [here](/drizzle-orm/src/pg-core/columns)
```typescript
import { ColumnConfig, ColumnBuilderConfig } from 'drizzle-orm';
import { AnyPgTable } from 'drizzle-orm/pg-core';
import { PgColumn, PgColumnBuilder } from './common';
export class PgTextBuilder
extends PgColumnBuilder<
ColumnBuilderConfig<{ data: TData; driverParam: string }>
>
{
build(
table: AnyPgTable<{ name: TTableName }>,
): PgText {
return new PgText(table, this.config);
}
}
export class PgText
extends PgColumn<
ColumnConfig<{ tableName: TTableName; data: TData; driverParam: string }>
>
{
constructor(
table: AnyPgTable<{ name: TTableName }>,
builder: PgTextBuilder['config'],
) {
super(table, builder);
}
getSQLType(): string {
return 'text';
}
}
export function text(
name: string,
): PgTextBuilder {
return new PgTextBuilder(name);
}
```
## Custom data type example
> [!NOTE]
> We will check example on pg module, but current pattern applies to all dialects, that are currently supported by Drizzle ORM
### Setting up CITEXT datatype
> [!NOTE]
> This type is available only with extensions and used for example, just to show how you could set up any data type you want. Extension support will come soon
### CITEXT data type example
```typescript
export class PgCITextBuilder extends PgColumnBuilder<
PgColumnBuilderHKT,
ColumnBuilderConfig<{ data: TData; driverParam: string }>
> {
protected $pgColumnBuilderBrand: string = 'PgCITextBuilder';
build(table: AnyPgTable<{ name: TTableName }>): PgCIText {
return new PgCIText(table, this.config);
}
}
export class PgCIText
extends PgColumn>
{
constructor(table: AnyPgTable<{ name: TTableName }>, builder: PgCITextBuilder['config']) {
super(table, builder);
}
getSQLType(): string {
return 'citext';
}
}
export function citext(name: string): PgCITextBuilder {
return new PgCITextBuilder(name);
}
```
#### Usage example
```typescript
const table = pgTable('table', {
id: integer('id').primaryKey(),
ciname: citext('ciname')
})
```
## Contributing by adding new custom types in Drizzle ORM
You could add your created custom data types to Drizzle ORM, so everyone can use it.
Each data type should be placed in separate file in `columns` folder and PR open with tag `new-data-type:pg` | `new-data-type:sqlite` | `new-data-type:mysql`
For more Contribution information - please check [CONTRIBUTING.md](../CONTRIBUTING.md)
================================================
FILE: docs/joins.md
================================================
# Drizzle ORM - Joins
As with other parts of Drizzle ORM, the joins syntax is a balance between the SQL-likeness and type safety.
Here's an example of how a common "one-to-many" relationship can be modelled.
```typescript
const users = pgTable('users', {
id: serial('id').primaryKey(),
firstName: text('first_name').notNull(),
lastName: text('last_name'),
cityId: integer('city_id').references(() => cities.id),
});
const cities = pgTable('cities', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
});
```
Now, let's select all cities with all users that live in that city.
This is how you'd write it in raw SQL:
```sql
select
cities.id as city_id,
cities.name as city_name,
users.id as user_id,
users.first_name,
users.last_name
from cities
left join users on users.city_id = cities.id
```
And here's how to do the same with Drizzle ORM:
```typescript
const rows = await db
.select({
cityId: cities.id,
cityName: cities.name,
userId: users.id,
firstName: users.firstName,
lastName: users.lastName,
})
.from(cities)
.leftJoin(users, eq(users.cityId, cities.id));
```
`rows` will have the following type:
```typescript
{
cityId: number;
cityName: string;
userId: number | null;
firstName: string | null;
lastName: string | null;
}[]
```
As you can see, all the joined columns have been nullified. This might do the trick if you're using joins to form a single row of results, but in our case we have two separate entities in our row - a city and a user.
It might not be very convenient to check every field for nullability separately (or, even worse, just add an `!` after every field to "make compiler happy"). It would be much more useful if you could somehow run a single check
to verify that the user was joined and all of its fields are available.
**To achieve that, you can group the fields of a certain table in a nested object inside of `.select()`:**
```typescript
const rows = await db
.select({
cityId: cities.id,
cityName: cities.name,
user: {
id: users.id,
firstName: users.firstName,
lastName: users.lastName,
},
})
.from(cities)
.leftJoin(users, eq(users.cityId, cities.id));
```
In that case, the ORM will use dark TypeScript magic (as if it wasn't already) and figure out that you have a nested object where all the fields belong to the same table. So, the `rows` type will now look like this:
```typescript
{
cityId: number;
cityName: string;
user: {
id: number;
firstName: string;
lastName: string | null;
} | null;
}
```
This is much more convenient! Now, you can just do a single check for `row.user !== null`, and all the user fields will become available.
---
Note that you can group any fields in a nested object however you like, but the single check optimization will only be applied to a certain nested object if all its fields belong to the same table.
So, for example, you can group the city fields, too:
```typescript
.select({
city: {
id: cities.id,
name: cities.name,
},
user: {
id: users.id,
firstName: users.firstName,
lastName: users.lastName,
},
})
```
And the result type will look like this:
```typescript
{
city: {
id: number;
name: string;
};
user: {
id: number;
firstName: string;
lastName: string | null;
} | null;
}
```
---
If you just need all the fields from all the tables you're selecting and joining, you can simply omit the argument of the `.select()` method altogether:
```typescript
const rows = await db.select().from(cities).leftJoin(users, eq(users.cityId, cities.id));
```
> [!NOTE]
> In this case, the Drizzle table/column names will be used as the keys in the result object.
```typescript
{
cities: {
id: number;
name: string;
};
users: {
id: number;
firstName: string;
lastName: string | null;
cityId: number | null;
} | null;
}[]
```
---
There are cases where you'd want to select all the fields from one table, but pick fields from others. In that case, instead of listing all the table fields, you can just pass a table:
```typescript
.select({
cities, // shorthand for "cities: cities", the key can be anything
user: {
firstName: users.firstName,
},
})
```
```typescript
{
cities: {
id: number;
name: string;
};
user: {
firstName: string;
} | null;
}
```
---
But what happens if you group columns from multiple tables in the same nested object? Nothing, really - they will still be all individually nullable, just grouped under the same object (as you might expect!):
```typescript
.select({
id: cities.id,
cityAndUser: {
cityName: cities.name,
userId: users.id,
firstName: users.firstName,
lastName: users.lastName,
}
})
```
```typescript
{
id: number;
cityAndUser: {
cityName: string;
userId: number | null;
firstName: string | null;
lastName: string | null;
};
}
```
## Aggregating results
OK, so you have obtained all the cities and the users for every city. But what you **really** wanted is a **list** of users for every city, and what you currently have is an array of `city-user?` combinations. So, how do you transform it?
That's the neat part - you can do that however you'd like! No hand-holding here.
For example, one of the ways to do that would be `Array.reduce()`:
```typescript
import { InferModel } from 'drizzle-orm';
type User = InferModel<typeof users>;
type City = InferModel<typeof cities>;
const rows = await db
.select({
city: cities,
user: users,
})
.from(cities)
.leftJoin(users, eq(users.cityId, cities.id));
const result = rows.reduce<Record<number, { city: City; users: User[] }>>(
(acc, row) => {
const city = row.city;
const user = row.user;
if (!acc[city.id]) {
acc[city.id] = { city, users: [] };
}
if (user) {
acc[city.id].users.push(user);
}
return acc;
},
{},
);
```
================================================
FILE: docs/table-introspect-api.md
================================================
# Table introspect API
## Get table information
```ts
import { pgTable, getTableConfig } from 'drizzle-orm/pg-core';
const table = pgTable(...);
const {
columns,
indexes,
foreignKeys,
checks,
primaryKeys,
name,
schema,
} = getTableConfig(table);
```
## Get table columns map
```ts
import { pgTable, getTableColumns } from 'drizzle-orm/pg-core';
const table = pgTable('table', {
id: integer('id').primaryKey(),
name: text('name'),
});
const columns/*: { id: ..., name: ... } */ = getTableColumns(table);
```
================================================
FILE: dprint.json
================================================
{
"typescript": {
"useTabs": true,
"quoteStyle": "preferSingle",
"quoteProps": "asNeeded",
"arrowFunction.useParentheses": "force",
"jsx.quoteStyle": "preferSingle"
},
"json": {
"useTabs": true
},
"markdown": {},
"includes": ["**/*.{ts,tsx,js,jsx,cjs,mjs,json}"],
"excludes": [
"**/node_modules",
"dist",
"dist-dts",
"dist.new",
"**/drizzle/**/meta",
"**/drizzle2/**/meta",
"**/*snapshot.json",
"**/_journal.json",
"**/tsup.config*.mjs",
"**/.sst",
"integration-tests/tests/prisma/*/client",
"integration-tests/tests/prisma/*/drizzle"
],
"plugins": [
"https://plugins.dprint.dev/typescript-0.91.1.wasm",
"https://plugins.dprint.dev/json-0.19.3.wasm",
"https://plugins.dprint.dev/markdown-0.17.1.wasm"
]
}
================================================
FILE: drizzle-arktype/README.md
================================================
`drizzle-arktype` is a plugin for [Drizzle ORM](https://github.com/drizzle-team/drizzle-orm) that allows you to generate [arktype](https://arktype.io/) schemas from Drizzle ORM schemas.
**Features**
- Create a select schema for tables, views and enums.
- Create insert and update schemas for tables.
- Supports all dialects: PostgreSQL, MySQL and SQLite.
# Usage
```ts
import { pgEnum, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core';
import { createInsertSchema, createSelectSchema, createUpdateSchema } from 'drizzle-arktype';
import { type } from 'arktype';
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
email: text('email').notNull(),
role: text('role', { enum: ['admin', 'user'] }).notNull(),
createdAt: timestamp('created_at').notNull().defaultNow(),
});
// Schema for inserting a user - can be used to validate API requests
const insertUserSchema = createInsertSchema(users);
// Schema for updating a user - can be used to validate API requests
const updateUserSchema = createUpdateSchema(users);
// Schema for selecting a user - can be used to validate API responses
const selectUserSchema = createSelectSchema(users);
// Overriding the fields
const insertUserSchema = createInsertSchema(users, {
role: type('string'),
});
// Refining the fields - useful if you want to change the fields before they become nullable/optional in the final schema
const insertUserSchema = createInsertSchema(users, {
id: (schema) => schema.atLeast(1),
role: type('string'),
});
// Usage
const user = insertUserSchema({
name: 'John Doe',
email: 'johndoe@test.com',
role: 'admin',
});
```
================================================
FILE: drizzle-arktype/benchmarks/types.ts
================================================
import { bench, setup } from '@ark/attest';
import { type } from 'arktype';
import { boolean, integer, pgTable, text } from 'drizzle-orm/pg-core';
import { createSelectSchema } from '~/index.ts';
const users = pgTable('users', {
id: integer().primaryKey(),
firstName: text().notNull(),
middleName: text(),
lastName: text().notNull(),
age: integer().notNull(),
admin: boolean().notNull().default(false),
});
const teardown = setup();
bench('select schema', () => {
return createSelectSchema(users);
}).types([13129, 'instantiations']);
bench('select schema with refinements', () => {
return createSelectSchema(users, {
firstName: (t) => t.atMostLength(100),
middleName: (t) => t.atMostLength(100),
lastName: (t) => t.atMostLength(100),
age: type.number.atLeast(1),
});
}).types([21631, 'instantiations']);
teardown();
================================================
FILE: drizzle-arktype/package.json
================================================
{
"name": "drizzle-arktype",
"version": "0.1.3",
"description": "Generate arktype schemas from Drizzle ORM schemas",
"type": "module",
"scripts": {
"build": "tsx scripts/build.ts",
"b": "pnpm build",
"test:types": "cd tests && tsc",
"pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz",
"publish": "npm publish package.tgz",
"test": "vitest run",
"bench:types": "tsx ./benchmarks/types.ts"
},
"exports": {
".": {
"import": {
"types": "./index.d.mts",
"default": "./index.mjs"
},
"require": {
"types": "./index.d.cjs",
"default": "./index.cjs"
},
"types": "./index.d.ts",
"default": "./index.mjs"
}
},
"main": "./index.cjs",
"module": "./index.mjs",
"types": "./index.d.ts",
"publishConfig": {
"provenance": true
},
"repository": {
"type": "git",
"url": "git+https://github.com/drizzle-team/drizzle-orm.git"
},
"keywords": [
"arktype",
"validate",
"validation",
"schema",
"drizzle",
"orm",
"pg",
"mysql",
"postgresql",
"postgres",
"sqlite",
"database",
"sql",
"typescript",
"ts"
],
"author": "Drizzle Team",
"license": "Apache-2.0",
"peerDependencies": {
"arktype": ">=2.0.0",
"drizzle-orm": ">=0.36.0"
},
"devDependencies": {
"@ark/attest": "^0.45.8",
"@rollup/plugin-typescript": "^11.1.0",
"@types/node": "^18.15.10",
"arktype": "^2.1.10",
"cpy": "^10.1.0",
"drizzle-orm": "link:../drizzle-orm/dist",
"json-rules-engine": "7.3.1",
"rimraf": "^5.0.0",
"rollup": "^3.29.5",
"tsx": "^4.19.3",
"vite-tsconfig-paths": "^4.3.2",
"vitest": "^3.1.3",
"zx": "^7.2.2"
}
}
================================================
FILE: drizzle-arktype/rollup.config.ts
================================================
import typescript from '@rollup/plugin-typescript';
import { defineConfig } from 'rollup';
/**
 * Rollup build for drizzle-arktype: one input, emitted twice (ESM + CJS).
 * `drizzle-orm` and `arktype` stay external; TypeScript is compiled with
 * the dedicated build tsconfig.
 */
const bundleOutput = (format: 'esm' | 'cjs', ext: 'mjs' | 'cjs') => ({
	format,
	dir: 'dist',
	entryFileNames: `[name].${ext}`,
	chunkFileNames: `[name]-[hash].${ext}`,
	sourcemap: true,
});

export default defineConfig([
	{
		input: 'src/index.ts',
		output: [
			bundleOutput('esm', 'mjs'),
			bundleOutput('cjs', 'cjs'),
		],
		external: [
			/^drizzle-orm\/?/,
			'arktype',
		],
		plugins: [
			typescript({
				tsconfig: 'tsconfig.build.json',
			}),
		],
	},
]);
================================================
FILE: drizzle-arktype/scripts/build.ts
================================================
#!/usr/bin/env -S pnpm tsx
import 'zx/globals';
import cpy from 'cpy';

// Clean previous output, bundle with Rollup, then resolve `~/` tsconfig paths.
await fs.remove('dist');
await $`rollup --config rollup.config.ts --configPlugin typescript`;
await $`resolve-tspaths`;

// Ship the README alongside the build output.
await fs.copy('README.md', 'dist/README.md');

// Duplicate every declaration file for both module flavours (.d.mts / .d.cts).
for (const declExt of ['.d.mts', '.d.cts'] as const) {
	await cpy('dist/**/*.d.ts', 'dist', {
		rename: (basename) => basename.replace(/\.d\.ts$/, declExt),
	});
}

// Publishable package.json, then rewrite import specifiers per module flavour.
await fs.copy('package.json', 'dist/package.json');
await $`scripts/fix-imports.ts`;
================================================
FILE: drizzle-arktype/scripts/fix-imports.ts
================================================
#!/usr/bin/env -S pnpm tsx
import 'zx/globals';
import path from 'node:path';
import { parse, print, visit } from 'recast';
import parser from 'recast/parsers/typescript';
/**
 * Expand the `~/` path alias into a relative path from the importing file.
 * Specifiers without the alias are returned untouched.
 *
 * NOTE(review): the alias target resolves against `dist.new`, while the
 * emitted files are globbed from `dist` — confirm this is intentional.
 */
function resolvePathAlias(importPath: string, file: string) {
	if (!importPath.startsWith('~/')) {
		return importPath;
	}
	const aliasTarget = path.resolve('dist.new', importPath.slice(2));
	const relativePath = path.relative(path.dirname(file), aliasTarget);
	return relativePath.startsWith('.') ? relativePath : `./${relativePath}`;
}

/**
 * Rewrite a relative `.js`/`.ts` import specifier to the given extension
 * (e.g. `.cjs`), after expanding the `~/` alias. Bare module specifiers
 * pass through unchanged.
 */
function fixImportPath(importPath: string, file: string, ext: string) {
	const resolved = resolvePathAlias(importPath, file);
	return /\..*\.(js|ts)$/.test(resolved)
		? resolved.replace(/\.(js|ts)$/, ext)
		: resolved;
}
/**
 * Rewrite every import/export/require specifier in one emitted file so that
 * relative `.js`/`.ts` references point at `ext`, and `~/` aliases are
 * resolved to relative paths.
 *
 * @param file - path of the emitted file to rewrite in place.
 * @param ext  - target extension for relative specifiers ('.cjs'/'.js'/'.mjs').
 * @param cjs  - CJS output: also rewrite `require(...)` arguments, and only
 *               resolve the alias (no extension change) inside `import(...)`
 *               type positions, matching the original per-flavour behavior.
 */
async function fixFileImports(file: string, ext: string, cjs: boolean) {
	const code = parse(await fs.readFile(file, 'utf8'), { parser });
	visit(code, {
		visitImportDeclaration(path) {
			path.value.source.value = fixImportPath(path.value.source.value, file, ext);
			this.traverse(path);
		},
		visitExportAllDeclaration(path) {
			path.value.source.value = fixImportPath(path.value.source.value, file, ext);
			this.traverse(path);
		},
		visitExportNamedDeclaration(path) {
			// Only re-exports carry a source module.
			if (path.value.source) {
				path.value.source.value = fixImportPath(path.value.source.value, file, ext);
			}
			this.traverse(path);
		},
		visitCallExpression(path) {
			// `require(...)` only exists in the CJS output.
			if (cjs && path.value.callee.type === 'Identifier' && path.value.callee.name === 'require') {
				path.value.arguments[0].value = fixImportPath(path.value.arguments[0].value, file, ext);
			}
			this.traverse(path);
		},
		visitTSImportType(path) {
			path.value.argument.value = cjs
				? resolvePathAlias(path.value.argument.value, file)
				: fixImportPath(path.value.argument.value, file, ext);
			this.traverse(path);
		},
		visitAwaitExpression(path) {
			// Dynamic `await import("./...")` of local modules.
			if (print(path.value).code.startsWith(`await import("./`)) {
				path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, ext);
			}
			this.traverse(path);
		},
	});
	await fs.writeFile(file, print(code).code);
}

// Process each output flavour sequentially; files within a flavour in parallel.
for (const [pattern, ext, cjs] of [
	['dist/**/*.{cjs,d.cts}', '.cjs', true],
	['dist/**/*.{js,d.ts}', '.js', false],
	['dist/**/*.{mjs,d.mts}', '.mjs', false],
] as const) {
	const files = await glob(pattern);
	await Promise.all(files.map((file) => fixFileImports(file, ext, cjs)));
}
================================================
FILE: drizzle-arktype/src/column.ts
================================================
import { type Type, type } from 'arktype';
import type { Column, ColumnBaseConfig } from 'drizzle-orm';
import type {
MySqlBigInt53,
MySqlChar,
MySqlDouble,
MySqlFloat,
MySqlInt,
MySqlMediumInt,
MySqlReal,
MySqlSerial,
MySqlSmallInt,
MySqlText,
MySqlTinyInt,
MySqlVarChar,
MySqlYear,
} from 'drizzle-orm/mysql-core';
import type {
PgArray,
PgBigInt53,
PgBigSerial53,
PgBinaryVector,
PgChar,
PgDoublePrecision,
PgGeometry,
PgGeometryObject,
PgHalfVector,
PgInteger,
PgLineABC,
PgLineTuple,
PgPointObject,
PgPointTuple,
PgReal,
PgSerial,
PgSmallInt,
PgSmallSerial,
PgUUID,
PgVarchar,
PgVector,
} from 'drizzle-orm/pg-core';
import type {
SingleStoreBigInt53,
SingleStoreChar,
SingleStoreDouble,
SingleStoreFloat,
SingleStoreInt,
SingleStoreMediumInt,
SingleStoreReal,
SingleStoreSerial,
SingleStoreSmallInt,
SingleStoreText,
SingleStoreTinyInt,
SingleStoreVarChar,
SingleStoreYear,
} from 'drizzle-orm/singlestore-core';
import type { SQLiteInteger, SQLiteReal, SQLiteText } from 'drizzle-orm/sqlite-core';
import { CONSTANTS } from './constants.ts';
import { isColumnType, isWithEnum } from './utils.ts';
// A JSON literal value: string, number, boolean or null.
export const literalSchema = type.string.or(type.number).or(type.boolean).or(type.null);
// Any JSON value: a literal, an array, or a plain object.
// NOTE(review): the `.as()` / `.as>()` generic arguments look truncated here — verify against upstream.
export const jsonSchema = literalSchema.or(type.unknown.as().array()).or(type.object.as>());
// Accepts exactly those values that are Node.js Buffer instances.
export const bufferSchema = type.unknown.narrow((value) => value instanceof Buffer).as().describe( // eslint-disable-line no-instanceof/no-instanceof
'a Buffer instance',
);
/**
 * Map a Drizzle column to the arktype `Type` that validates its values.
 *
 * Resolution order: declared enum values first, then special column classes
 * (geometry/point, vector, line, array), then the generic `dataType`
 * discriminator; anything unrecognised falls back to `unknown`.
 */
export function columnToSchema(column: Column): Type {
let schema!: Type;
// Columns declared with enum values validate against those literals.
if (isWithEnum(column)) {
schema = column.enumValues.length ? type.enumerated(...column.enumValues) : type.string;
}
if (!schema) {
// Handle specific types
if (isColumnType | PgPointTuple>(column, ['PgGeometry', 'PgPointTuple'])) {
// Tuple form: [x, y].
schema = type([type.number, type.number]);
} else if (
isColumnType | PgGeometryObject>(column, ['PgGeometryObject', 'PgPointObject'])
) {
// Object form: { x, y }.
schema = type({
x: type.number,
y: type.number,
});
} else if (isColumnType | PgVector>(column, ['PgHalfVector', 'PgVector'])) {
// Vectors: number[] of exactly `dimensions` entries when declared.
schema = column.dimensions
? type.number.array().exactlyLength(column.dimensions)
: type.number.array();
} else if (isColumnType>(column, ['PgLine'])) {
// Line tuple form: [a, b, c].
schema = type([type.number, type.number, type.number]);
} else if (isColumnType>(column, ['PgLineABC'])) {
// Line object form: { a, b, c }.
schema = type({
a: type.number,
b: type.number,
c: type.number,
});
} // Handle other types
else if (isColumnType>(column, ['PgArray'])) {
// Recurse into the base column; enforce size when declared.
const arraySchema = columnToSchema(column.baseColumn).array();
schema = column.size ? arraySchema.exactlyLength(column.size) : arraySchema;
} else if (column.dataType === 'array') {
schema = type.unknown.array();
} else if (column.dataType === 'number') {
schema = numberColumnToSchema(column);
} else if (column.dataType === 'bigint') {
schema = bigintColumnToSchema(column);
} else if (column.dataType === 'boolean') {
schema = type.boolean;
} else if (column.dataType === 'date') {
schema = type.Date;
} else if (column.dataType === 'string') {
schema = stringColumnToSchema(column);
} else if (column.dataType === 'json') {
schema = jsonSchema;
} else if (column.dataType === 'custom') {
schema = type.unknown;
} else if (column.dataType === 'buffer') {
schema = bufferSchema;
}
}
// Fallback for anything the branches above did not recognise.
if (!schema) {
schema = type.unknown;
}
return schema;
}
/**
 * Build the arktype schema for a numeric (JS `number`) column.
 *
 * Picks [min, max] bounds from the concrete column class, widening to the
 * unsigned range when the SQL type contains `unsigned`, and requires an
 * integer when the SQL type is integral.
 */
function numberColumnToSchema(column: Column): Type {
let unsigned = column.getSQLType().includes('unsigned');
let min!: number;
let max!: number;
let integer = false;
if (isColumnType | SingleStoreTinyInt>(column, ['MySqlTinyInt', 'SingleStoreTinyInt'])) {
// 8-bit integer columns.
min = unsigned ? 0 : CONSTANTS.INT8_MIN;
max = unsigned ? CONSTANTS.INT8_UNSIGNED_MAX : CONSTANTS.INT8_MAX;
integer = true;
} else if (
isColumnType | PgSmallSerial | MySqlSmallInt | SingleStoreSmallInt>(column, [
'PgSmallInt',
'PgSmallSerial',
'MySqlSmallInt',
'SingleStoreSmallInt',
])
) {
// 16-bit integer columns.
min = unsigned ? 0 : CONSTANTS.INT16_MIN;
max = unsigned ? CONSTANTS.INT16_UNSIGNED_MAX : CONSTANTS.INT16_MAX;
integer = true;
} else if (
isColumnType<
PgReal | MySqlFloat | MySqlMediumInt | SingleStoreFloat | SingleStoreMediumInt
>(column, [
'PgReal',
'MySqlFloat',
'MySqlMediumInt',
'SingleStoreFloat',
'SingleStoreMediumInt',
])
) {
// 24-bit range; only the MEDIUMINT variants are integral, REAL/FLOAT are not.
min = unsigned ? 0 : CONSTANTS.INT24_MIN;
max = unsigned ? CONSTANTS.INT24_UNSIGNED_MAX : CONSTANTS.INT24_MAX;
integer = isColumnType(column, ['MySqlMediumInt', 'SingleStoreMediumInt']);
} else if (
isColumnType | PgSerial | MySqlInt | SingleStoreInt>(column, [
'PgInteger',
'PgSerial',
'MySqlInt',
'SingleStoreInt',
])
) {
// 32-bit integer columns.
min = unsigned ? 0 : CONSTANTS.INT32_MIN;
max = unsigned ? CONSTANTS.INT32_UNSIGNED_MAX : CONSTANTS.INT32_MAX;
integer = true;
} else if (
isColumnType<
| PgDoublePrecision
| MySqlReal
| MySqlDouble
| SingleStoreReal
| SingleStoreDouble
| SQLiteReal
>(column, [
'PgDoublePrecision',
'MySqlReal',
'MySqlDouble',
'SingleStoreReal',
'SingleStoreDouble',
'SQLiteReal',
])
) {
// Floating-point columns bounded to the 48-bit range; not integral.
min = unsigned ? 0 : CONSTANTS.INT48_MIN;
max = unsigned ? CONSTANTS.INT48_UNSIGNED_MAX : CONSTANTS.INT48_MAX;
} else if (
isColumnType<
| PgBigInt53
| PgBigSerial53
| MySqlBigInt53
| MySqlSerial
| SingleStoreBigInt53
| SingleStoreSerial
| SQLiteInteger
>(
column,
[
'PgBigInt53',
'PgBigSerial53',
'MySqlBigInt53',
'MySqlSerial',
'SingleStoreBigInt53',
'SingleStoreSerial',
'SQLiteInteger',
],
)
) {
// 64-bit columns surfaced as JS numbers: clamp to the safe-integer range.
// Serial columns are treated as unsigned.
unsigned = unsigned || isColumnType(column, ['MySqlSerial', 'SingleStoreSerial']);
min = unsigned ? 0 : Number.MIN_SAFE_INTEGER;
max = Number.MAX_SAFE_INTEGER;
integer = true;
} else if (isColumnType | SingleStoreYear>(column, ['MySqlYear', 'SingleStoreYear'])) {
// YEAR columns accept 1901-2155.
min = 1901;
max = 2155;
integer = true;
} else {
// Unknown numeric column: fall back to the JS safe-integer range.
min = Number.MIN_SAFE_INTEGER;
max = Number.MAX_SAFE_INTEGER;
}
return (integer ? type.keywords.number.integer : type.number).atLeast(min).atMost(max);
}
/** @internal Narrows a bigint to the unsigned 64-bit range [0, 2^64 - 1]. */
export const unsignedBigintNarrow = (v: bigint, ctx: { mustBe: (expected: string) => false }) => {
	if (v < 0n) {
		return ctx.mustBe('greater than');
	}
	if (v > CONSTANTS.INT64_UNSIGNED_MAX) {
		return ctx.mustBe('less than');
	}
	return true;
};
/** @internal Narrows a bigint to the signed 64-bit range [-2^63, 2^63 - 1]. */
export const bigintNarrow = (v: bigint, ctx: { mustBe: (expected: string) => false }) => {
	if (v < CONSTANTS.INT64_MIN) {
		return ctx.mustBe('greater than');
	}
	if (v > CONSTANTS.INT64_MAX) {
		return ctx.mustBe('less than');
	}
	return true;
};
/** Build the arktype schema for a bigint column, bounded to its 64-bit range. */
function bigintColumnToSchema(column: Column): Type {
	const isUnsigned = column.getSQLType().includes('unsigned');
	const narrow = isUnsigned ? unsignedBigintNarrow : bigintNarrow;
	return type.bigint.narrow(narrow);
}
/**
 * Build the arktype schema for a string-typed column.
 *
 * UUID and binary-vector columns get pattern schemas; char/varchar/text
 * variants get a maximum-length bound (exact length for fixed-width chars).
 */
function stringColumnToSchema(column: Column): Type {
// UUIDs are validated with an RFC-4122 pattern.
if (isColumnType>>(column, ['PgUUID'])) {
return type(/^[\da-f]{8}(?:-[\da-f]{4}){3}-[\da-f]{12}$/iu).describe('a RFC-4122-compliant UUID');
}
if (
isColumnType<
PgBinaryVector<
ColumnBaseConfig<'string', 'PgBinaryVector'> & {
dimensions: number;
}
>
>(column, ['PgBinaryVector'])
) {
// Binary vectors are strings of exactly `dimensions` zeros/ones.
return type(`/^[01]{${column.dimensions}}$/`)
.describe(`a string containing ones or zeros while being ${column.dimensions} characters long`);
}
let max: number | undefined;
let fixed = false;
if (isColumnType | SQLiteText>(column, ['PgVarchar', 'SQLiteText'])) {
max = column.length;
} else if (
isColumnType | SingleStoreVarChar>(column, ['MySqlVarChar', 'SingleStoreVarChar'])
) {
// varchar without an explicit length defaults to the 16-bit maximum.
max = column.length ?? CONSTANTS.INT16_UNSIGNED_MAX;
} else if (isColumnType | SingleStoreText>(column, ['MySqlText', 'SingleStoreText'])) {
// text variants have storage-class-driven maximum lengths.
if (column.textType === 'longtext') {
max = CONSTANTS.INT32_UNSIGNED_MAX;
} else if (column.textType === 'mediumtext') {
max = CONSTANTS.INT24_UNSIGNED_MAX;
} else if (column.textType === 'text') {
max = CONSTANTS.INT16_UNSIGNED_MAX;
} else {
max = CONSTANTS.INT8_UNSIGNED_MAX;
}
}
if (
isColumnType | MySqlChar | SingleStoreChar>(column, [
'PgChar',
'MySqlChar',
'SingleStoreChar',
])
) {
// char columns are fixed-width: enforce the exact length below.
max = column.length;
fixed = true;
}
return max && fixed ? type.string.exactlyLength(max) : max ? type.string.atMostLength(max) : type.string;
}
================================================
FILE: drizzle-arktype/src/column.types.ts
================================================
import { Type, type } from 'arktype';
import type { Column } from 'drizzle-orm';
import type { Json } from './utils.ts';
// Widens a schema to also accept `null`.
export type ArktypeNullable = Type | null>;
// Marks a schema as an optional object property (arktype tuple syntax).
export type ArktypeOptional = [Type>, '?'];
// Resolves the arktype Type for a column's data; JSON columns whose data
// type was not customised fall back to the generic JSON value type.
export type GetArktypeType<
TColumn extends Column,
> = TColumn['_']['columnType'] extends
'PgJson' | 'PgJsonb' | 'MySqlJson' | 'SingleStoreJson' | 'SQLiteTextJson' | 'SQLiteBlobJson'
? unknown extends TColumn['_']['data'] ? Type : Type
: Type;
// Select: not-null columns keep their schema; others become nullable.
type HandleSelectColumn<
TSchema,
TColumn extends Column,
> = TColumn['_']['notNull'] extends true ? TSchema
: ArktypeNullable;
// Insert: not-null columns with a default become optional; nullable ones
// become both optional and nullable.
type HandleInsertColumn<
TSchema,
TColumn extends Column,
> = TColumn['_']['notNull'] extends true ? TColumn['_']['hasDefault'] extends true ? ArktypeOptional
: TSchema
: ArktypeOptional>;
// Update: every column becomes optional; nullable ones also accept null.
type HandleUpdateColumn<
TSchema,
TColumn extends Column,
> = TColumn['_']['notNull'] extends true ? ArktypeOptional
: ArktypeOptional>;
// Dispatches a column schema to the select/insert/update transform above.
export type HandleColumn<
TType extends 'select' | 'insert' | 'update',
TColumn extends Column,
> = TType extends 'select' ? HandleSelectColumn, TColumn>
: TType extends 'insert' ? HandleInsertColumn, TColumn>
: TType extends 'update' ? HandleUpdateColumn, TColumn>
: GetArktypeType;
================================================
FILE: drizzle-arktype/src/constants.ts
================================================
/** Integer range boundaries for the column widths used across dialects. */
export const CONSTANTS = {
	// 8-bit
	INT8_MIN: -128,
	INT8_MAX: 127,
	INT8_UNSIGNED_MAX: 255,
	// 16-bit
	INT16_MIN: -32_768,
	INT16_MAX: 32_767,
	INT16_UNSIGNED_MAX: 65_535,
	// 24-bit
	INT24_MIN: -8_388_608,
	INT24_MAX: 8_388_607,
	INT24_UNSIGNED_MAX: 16_777_215,
	// 32-bit
	INT32_MIN: -2_147_483_648,
	INT32_MAX: 2_147_483_647,
	INT32_UNSIGNED_MAX: 4_294_967_295,
	// 48-bit
	INT48_MIN: -140_737_488_355_328,
	INT48_MAX: 140_737_488_355_327,
	INT48_UNSIGNED_MAX: 281_474_976_710_655,
	// 64-bit (bigint — exceeds Number.MAX_SAFE_INTEGER)
	INT64_MIN: -9_223_372_036_854_775_808n,
	INT64_MAX: 9_223_372_036_854_775_807n,
	INT64_UNSIGNED_MAX: 18_446_744_073_709_551_615n,
};
================================================
FILE: drizzle-arktype/src/index.ts
================================================
// Barrel file: the public entry point for drizzle-arktype.
// `column.ts` re-exports only its public schemas; the rest are wholesale.
export { bufferSchema, jsonSchema, literalSchema } from './column.ts';
export * from './column.types.ts';
export * from './schema.ts';
export * from './schema.types.internal.ts';
export * from './schema.types.ts';
export * from './utils.ts';
================================================
FILE: drizzle-arktype/src/schema.ts
================================================
import { Type, type } from 'arktype';
import { Column, getTableColumns, getViewSelectedFields, is, isTable, isView, SQL } from 'drizzle-orm';
import type { Table, View } from 'drizzle-orm';
import type { PgEnum } from 'drizzle-orm/pg-core';
import { columnToSchema } from './column.ts';
import type { Conditions } from './schema.types.internal.ts';
import type { CreateInsertSchema, CreateSelectSchema, CreateUpdateSchema } from './schema.types.ts';
import { isPgEnum } from './utils.ts';
/** Extract the column map from either a table or a view. */
function getColumns(tableLike: Table | View) {
	if (isTable(tableLike)) {
		return getTableColumns(tableLike);
	}
	return getViewSelectedFields(tableLike);
}
/**
 * Recursively build an arktype object schema for a map of selected fields
 * (columns, SQL expressions, nested tables/views), applying the user's
 * refinements and the never/optional/nullable `conditions`.
 */
function handleColumns(
columns: Record,
refinements: Record,
conditions: Conditions,
): Type {
const columnSchemas: Record = {};
for (const [key, selected] of Object.entries(columns)) {
// Nested selection (plain object, table or view): recurse with the
// matching sub-refinements.
if (!is(selected, Column) && !is(selected, SQL) && !is(selected, SQL.Aliased) && typeof selected === 'object') {
const columns = isTable(selected) || isView(selected) ? getColumns(selected) : selected;
columnSchemas[key] = handleColumns(columns, refinements[key] ?? {}, conditions);
continue;
}
const refinement = refinements[key];
// A non-callable refinement (or an arktype expression, which also has
// `.expression`) replaces the generated schema outright.
if (
refinement !== undefined
&& (typeof refinement !== 'function' || (typeof refinement === 'function' && refinement.expression !== undefined))
) {
columnSchemas[key] = refinement;
continue;
}
const column = is(selected, Column) ? selected : undefined;
const schema = column ? columnToSchema(column) : type.unknown;
// A callable refinement transforms the generated schema.
const refined = typeof refinement === 'function' ? refinement(schema) : schema;
// Keys marked `never` (e.g. generated-always columns) are dropped.
if (conditions.never(column)) {
continue;
} else {
columnSchemas[key] = refined;
}
if (column) {
if (conditions.nullable(column)) {
columnSchemas[key] = columnSchemas[key]!.or(type.null);
}
if (conditions.optional(column)) {
columnSchemas[key] = columnSchemas[key]!.optional() as any;
}
}
}
return type(columnSchemas);
}
/**
 * Create a schema validating rows selected from a table/view, or the values
 * of a pg enum. Columns that are not `notNull` also accept null.
 */
export const createSelectSchema = ((
entity: Table | View | PgEnum<[string, ...string[]]>,
refine?: Record,
) => {
// Enums validate directly against their literal values.
if (isPgEnum(entity)) {
return type.enumerated(...entity.enumValues);
}
const columns = getColumns(entity);
return handleColumns(columns, refine ?? {}, {
never: () => false,
optional: () => false,
nullable: (column) => !column.notNull,
}) as any;
}) as CreateSelectSchema;
/**
 * Create a schema for inserting into a table: generated-always columns are
 * excluded; columns with defaults or without `notNull` become optional.
 */
export const createInsertSchema = ((
entity: Table,
refine?: Record,
) => {
const columns = getColumns(entity);
return handleColumns(columns, refine ?? {}, {
never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always',
optional: (column) => !column.notNull || (column.notNull && column.hasDefault),
nullable: (column) => !column.notNull,
}) as any;
}) as CreateInsertSchema;
/**
 * Create a schema for updating a table: generated-always columns are
 * excluded and every remaining column becomes optional.
 */
export const createUpdateSchema = ((
entity: Table,
refine?: Record,
) => {
const columns = getColumns(entity);
return handleColumns(columns, refine ?? {}, {
never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always',
optional: () => true,
nullable: (column) => !column.notNull,
}) as any;
}) as CreateUpdateSchema;
================================================
FILE: drizzle-arktype/src/schema.types.internal.ts
================================================
import type { Type, type } from 'arktype';
import type { Column, DrizzleTypeError, SelectedFieldsFlat, Simplify, Table, View } from 'drizzle-orm';
import type { ArktypeNullable, ArktypeOptional, GetArktypeType, HandleColumn } from './column.types.ts';
import type { ColumnIsGeneratedAlwaysAs, GetSelection } from './utils.ts';
export interface Conditions {
never: (column?: Column) => boolean;
optional: (column: Column) => boolean;
nullable: (column: Column) => boolean;
}
// An arktype schema value a refinement may supply: either a cast schema or an
// optional-marker tuple. A refinement field is either such a schema, or a
// callback receiving the generated schema and returning a replacement.
// NOTE(review): generic parameter lists appear stripped by extraction
// (e.g. `type.cast` and `BuildRefineField` lack type arguments) — restore
// from upstream before compiling.
type GenericSchema = type.cast | [type.cast, '?'];
type BuildRefineField = T extends GenericSchema ? ((schema: T) => GenericSchema) | GenericSchema : never;
// Shape of the user-supplied `refine` argument: one entry per column (or
// nested selection/table/view, recursing via GetSelection). Keys that do not
// correspond to a column or nested selection are remapped to `never`.
// NOTE(review): several generic argument lists here look stripped by
// extraction (`Record`, `BuildRefineField>`) — verify against upstream.
export type BuildRefine<
TColumns extends Record,
> = {
[K in keyof TColumns as TColumns[K] extends Column | SelectedFieldsFlat | Table | View ? K : never]?:
TColumns[K] extends Column ? BuildRefineField>
: BuildRefine>;
};
// Computes the resulting field type when a refinement is present. A callback
// refinement yields its return type, re-wrapped as nullable when the column
// is not NOT NULL and as optional for 'update' schemas; a plain schema
// refinement is used as-is.
// NOTE(review): type argument lists appear stripped by extraction
// (`ReturnType`, `ArktypeNullable>`, bare `Type`) — confirm with upstream.
type HandleRefinement<
TType extends 'select' | 'insert' | 'update',
TRefinement,
TColumn extends Column,
> = TRefinement extends (schema: any) => GenericSchema ? (
TColumn['_']['notNull'] extends true ? ReturnType
: ArktypeNullable>
) extends infer TSchema ? TType extends 'update' ? ArktypeOptional
: TSchema
: Type
: TRefinement;
// True when the refinements object exists and carries a usable entry (schema
// or callback) for the given key; false otherwise.
type IsRefinementDefined<
TRefinements extends Record | undefined,
TKey extends string | symbol | number,
> = TRefinements extends object ? TRefinements[TKey] extends GenericSchema | ((schema: any) => any) ? true
: false
: false;
// The compile-time counterpart of the runtime schema builders: maps every
// column (dropping always-generated ones via ColumnIsGeneratedAlwaysAs) to
// either its refined type or the default HandleColumn result, recursing into
// nested selections/tables/views, and wraps the whole shape in an arktype
// instantiation.
// NOTE(review): generic argument lists look stripped by extraction
// (`ColumnIsGeneratedAlwaysAs extends true`, `IsRefinementDefined extends
// true`, bare `HandleRefinement`/`HandleColumn`/`GetSelection`) — restore
// from upstream before relying on this type.
export type BuildSchema<
TType extends 'select' | 'insert' | 'update',
TColumns extends Record,
TRefinements extends Record | undefined,
> = type.instantiate<
Simplify<
{
readonly [K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? never : K]:
TColumns[K] extends infer TColumn extends Column
? IsRefinementDefined extends true
? HandleRefinement
: HandleColumn
: TColumns[K] extends infer TNested extends SelectedFieldsFlat | Table | View ? BuildSchema<
TType,
GetSelection,
TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined
>
: any;
}
>
>;
// Rejects refinement keys that do not exist on the compared shape by mapping
// them to a DrizzleTypeError, producing the compile-time errors exercised by
// the "Disallow unknown keys" test blocks. Recurses into nested objects.
export type NoUnknownKeys<
TRefinement extends Record,
TCompare extends Record,
> = {
[K in keyof TRefinement]: K extends keyof TCompare
? TRefinement[K] extends Record ? NoUnknownKeys
: TRefinement[K]
: DrizzleTypeError<`Found unknown key in refinement: "${K & string}"`>;
};
================================================
FILE: drizzle-arktype/src/schema.types.ts
================================================
import type { Type } from 'arktype';
import type { Table, View } from 'drizzle-orm';
import type { PgEnum } from 'drizzle-orm/pg-core';
import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts';
// Call signatures for `createSelectSchema`: plain table, table + refinements,
// view, view + refinements, and pg enum (which maps to an arktype enumerated
// Type).
// NOTE(review): the overloads' generic parameter lists appear stripped by
// extraction (e.g. `(table: TTable)` with no `<TTable extends Table>`, and a
// dangling `>(enum_: TEnum): Type;`) — restore from upstream before compiling.
export interface CreateSelectSchema {
(table: TTable): BuildSchema<'select', TTable['_']['columns'], undefined>;
<
TTable extends Table,
TRefine extends BuildRefine,
>(
table: TTable,
refine?: NoUnknownKeys,
): BuildSchema<'select', TTable['_']['columns'], TRefine>;
(view: TView): BuildSchema<'select', TView['_']['selectedFields'], undefined>;
<
TView extends View,
TRefine extends BuildRefine,
>(
view: TView,
refine: NoUnknownKeys,
): BuildSchema<'select', TView['_']['selectedFields'], TRefine>;
>(enum_: TEnum): Type;
}
// Call signatures for `createInsertSchema`: table, or table + refinements
// validated against the table's columns via NoUnknownKeys.
// NOTE(review): generic parameter lists look stripped by extraction — verify
// against upstream.
export interface CreateInsertSchema {
(table: TTable): BuildSchema<'insert', TTable['_']['columns'], undefined>;
<
TTable extends Table,
TRefine extends BuildRefine>,
>(
table: TTable,
refine?: NoUnknownKeys,
): BuildSchema<'insert', TTable['_']['columns'], TRefine>;
}
// Call signatures for `createUpdateSchema`: table, or table + refinements.
// Unlike the insert/select variants, the refine parameter here is typed as
// TRefine directly rather than going through NoUnknownKeys.
// NOTE(review): generic parameter lists look stripped by extraction — verify
// against upstream.
export interface CreateUpdateSchema {
(table: TTable): BuildSchema<'update', TTable['_']['columns'], undefined>;
<
TTable extends Table,
TRefine extends BuildRefine>,
>(
table: TTable,
refine?: TRefine,
): BuildSchema<'update', TTable['_']['columns'], TRefine>;
}
================================================
FILE: drizzle-arktype/src/utils.ts
================================================
import type { type } from 'arktype';
import type { Column, SelectedFieldsFlat, Table, View } from 'drizzle-orm';
import type { PgEnum } from 'drizzle-orm/pg-core';
import type { literalSchema } from './column.ts';
/**
 * Narrows a column to a specific drizzle column class by matching its
 * `columnType` discriminator against the given list of type names.
 *
 * Note: the predicate is unchecked — callers are responsible for passing
 * `columnTypes` values that actually correspond to `T`.
 */
// Fix: the return type predicate `column is T` referenced a type parameter
// that was never declared; declare `<T extends Column>` so the guard compiles.
export function isColumnType<T extends Column>(column: Column, columnTypes: string[]): column is T {
	return columnTypes.includes(column.columnType);
}
/**
 * True when the column carries a non-empty `enumValues` array, narrowing it
 * to a column with at least one enum member.
 */
export function isWithEnum(column: Column): column is typeof column & { enumValues: [string, ...string[]] } {
	if (!('enumValues' in column)) {
		return false;
	}
	const { enumValues } = column;
	return Array.isArray(enumValues) && enumValues.length > 0;
}
export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any;
// JSON value type used by json/jsonb column schemas: a literal (inferred from
// the runtime literalSchema in column.ts), an object, or an array.
// NOTE(review): `type.infer` appears to have lost its type argument during
// extraction (likely `type.infer<typeof literalSchema>`) and `Record` its
// parameters — confirm with upstream.
type Literal = type.infer;
export type Json = Literal | Record | any[];
// True for columns that can never be written: always-generated identity
// columns, and generated columns whose `generated.type` is not 'byDefault'.
// Used by BuildSchema to drop such keys from insert/update shapes.
// NOTE(review): the declaration's type parameter list looks stripped by
// extraction (`TColumn` is used but not declared) — restore from upstream.
export type ColumnIsGeneratedAlwaysAs = TColumn extends Column
? TColumn['_']['identity'] extends 'always' ? true
: TColumn['_']['generated'] extends { type: 'byDefault' } | undefined ? false
: true
: false;
// Normalizes a table/view/selection to its flat column map: tables yield
// their columns, views their selected fields, anything else passes through.
// NOTE(review): the constraint looks garbled by extraction (probably
// `<T extends SelectedFieldsFlat<Column> | Table | View>`) — verify upstream.
export type GetSelection | Table | View> = T extends Table ? T['_']['columns']
: T extends View ? T['_']['selectedFields']
: T;
================================================
FILE: drizzle-arktype/tests/mysql.test.ts
================================================
import { Type, type } from 'arktype';
import { type Equal, sql } from 'drizzle-orm';
import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text } from 'drizzle-orm/mysql-core';
import type { TopLevelCondition } from 'json-rules-engine';
import { test } from 'vitest';
import { bigintNarrow, jsonSchema, unsignedBigintNarrow } from '~/column.ts';
import { CONSTANTS } from '~/constants.ts';
import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src';
import { Expect, expectSchemaShape } from './utils.ts';
// Baseline arktype schemas matching drizzle's MySQL column mappings:
// int → 32-bit integer, serial → non-negative safe integer, text → string
// capped at the MySQL TEXT byte limit (2^16 - 1).
const intSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX);
const serialNumberModeSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER);
const textSchema = type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX);
// Select schema of a plain table: all columns required, per-type validation.
test('table - select', (t) => {
const table = mysqlTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const result = createSelectSchema(table);
const expected = type({ id: serialNumberModeSchema, name: textSchema });
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Tables declared inside a mysqlSchema behave identically to top-level tables.
test('table in schema - select', (tc) => {
const schema = mysqlSchema('test');
const table = schema.table('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const result = createSelectSchema(table);
const expected = type({ id: serialNumberModeSchema, name: textSchema });
expectSchemaShape(tc, expected).from(result);
Expect>();
});
// Insert schema: serial (has default) is optional, nullable int is optional
// and unioned with null, NOT NULL text stays required.
test('table - insert', (t) => {
const table = mysqlTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
age: int(),
});
const result = createInsertSchema(table);
const expected = type({
id: serialNumberModeSchema.optional(),
name: textSchema,
age: intSchema.or(type.null).optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Update schema: every column becomes optional.
test('table - update', (t) => {
const table = mysqlTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
age: int(),
});
const result = createUpdateSchema(table);
const expected = type({
id: serialNumberModeSchema.optional(),
name: textSchema.optional(),
age: intSchema.or(type.null).optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Query-builder view: raw SQL fields have no known type and map to unknown.
test('view qb - select', (t) => {
const table = mysqlTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table));
const result = createSelectSchema(view);
const expected = type({ id: serialNumberModeSchema, age: type('unknown.any') });
expectSchemaShape(t, expected).from(result);
Expect>();
});
// View declared with explicit columns: behaves like a table select.
test('view columns - select', (t) => {
const view = mysqlView('test', {
id: serial().primaryKey(),
name: text().notNull(),
}).as(sql``);
const result = createSelectSchema(view);
const expected = type({ id: serialNumberModeSchema, name: textSchema });
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Nested selections and embedded tables become nested object schemas.
test('view with nested fields - select', (t) => {
const table = mysqlTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const view = mysqlView('test').as((qb) =>
qb.select({
id: table.id,
nested: {
name: table.name,
age: sql``.as('age'),
},
table,
}).from(table)
);
const result = createSelectSchema(view);
const expected = type({
id: serialNumberModeSchema,
nested: type({ name: textSchema, age: type('unknown.any') }),
table: type({ id: serialNumberModeSchema, name: textSchema }),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Select: only NOT NULL decides nullability; defaults are irrelevant.
test('nullability - select', (t) => {
const table = mysqlTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().default(1),
c4: int().notNull().default(1),
});
const result = createSelectSchema(table);
const expected = type({
c1: intSchema.or(type.null),
c2: intSchema,
c3: intSchema.or(type.null),
c4: intSchema,
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Insert: nullable or defaulted columns are optional; the always-generated
// column c5 is excluded from the schema entirely.
test('nullability - insert', (t) => {
const table = mysqlTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().default(1),
c4: int().notNull().default(1),
c5: int().generatedAlwaysAs(1),
});
const result = createInsertSchema(table);
const expected = type({
c1: intSchema.or(type.null).optional(),
c2: intSchema,
c3: intSchema.or(type.null).optional(),
c4: intSchema.optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Update: everything optional; always-generated c5 still excluded.
test('nullability - update', (t) => {
const table = mysqlTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().default(1),
c4: int().notNull().default(1),
c5: int().generatedAlwaysAs(1),
});
const result = createUpdateSchema(table);
const expected = type({
c1: intSchema.or(type.null).optional(),
c2: intSchema.optional(),
c3: intSchema.or(type.null).optional(),
c4: intSchema.optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Refinements: a callback receives the generated column schema to tighten it;
// a plain schema replaces the generated one wholesale.
test('refine table - select', (t) => {
const table = mysqlTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().notNull(),
});
const result = createSelectSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: intSchema.or(type.null),
c2: intSchema.atMost(1000),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Custom data types have no generated schema, so a refinement must supply one.
test('refine table - select with custom data type', (t) => {
const customText = customType({ dataType: () => 'text' });
const table = mysqlTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().notNull(),
c4: customText(),
});
const customTextSchema = type.string.atLeastLength(1).atMostLength(100);
const result = createSelectSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
c4: customTextSchema,
});
const expected = type({
c1: intSchema.or(type.null),
c2: intSchema.atMost(1000),
c3: type.string.pipe(Number),
c4: customTextSchema,
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('refine table - insert', (t) => {
const table = mysqlTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().notNull(),
c4: int().generatedAlwaysAs(1),
});
const result = createInsertSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: intSchema.or(type.null).optional(),
c2: intSchema.atMost(1000),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Note: callback refinements gain `.optional()` in update schemas, while
// plain-schema refinements (c3) are used exactly as provided.
test('refine table - update', (t) => {
const table = mysqlTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().notNull(),
c4: int().generatedAlwaysAs(1),
});
const result = createUpdateSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: intSchema.or(type.null).optional(),
c2: intSchema.atMost(1000).optional(),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Refinements recurse into nested selections and embedded tables of a view.
test('refine view - select', (t) => {
const table = mysqlTable('test', {
c1: int(),
c2: int(),
c3: int(),
c4: int(),
c5: int(),
c6: int(),
});
const view = mysqlView('test').as((qb) =>
qb.select({
c1: table.c1,
c2: table.c2,
c3: table.c3,
nested: {
c4: table.c4,
c5: table.c5,
c6: table.c6,
},
table,
}).from(table)
);
const result = createSelectSchema(view, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
nested: {
c5: (schema) => schema.atMost(1000),
c6: type.string.pipe(Number),
},
table: {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
},
});
const expected = type({
c1: intSchema.or(type.null),
c2: intSchema.atMost(1000).or(type.null),
c3: type.string.pipe(Number),
nested: type({
c4: intSchema.or(type.null),
c5: intSchema.atMost(1000).or(type.null),
c6: type.string.pipe(Number),
}),
table: type({
c1: intSchema.or(type.null),
c2: intSchema.atMost(1000).or(type.null),
c3: type.string.pipe(Number),
c4: intSchema.or(type.null),
c5: intSchema.or(type.null),
c6: intSchema.or(type.null),
}),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Exhaustive mapping check: every MySQL column type produces the expected
// arktype schema (integer ranges from CONSTANTS, enums, string length caps,
// Date vs string modes, bigint narrowing, etc.).
test('all data types', (t) => {
const table = mysqlTable('test', ({
bigint,
binary,
boolean,
char,
date,
datetime,
decimal,
double,
float,
int,
json,
mediumint,
mysqlEnum,
real,
serial,
smallint,
text,
time,
timestamp,
tinyint,
varchar,
varbinary,
year,
longtext,
mediumtext,
tinytext,
}) => ({
bigint1: bigint({ mode: 'number' }).notNull(),
bigint2: bigint({ mode: 'bigint' }).notNull(),
bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(),
bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(),
binary: binary({ length: 10 }).notNull(),
boolean: boolean().notNull(),
char1: char({ length: 10 }).notNull(),
char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(),
date1: date({ mode: 'date' }).notNull(),
date2: date({ mode: 'string' }).notNull(),
datetime1: datetime({ mode: 'date' }).notNull(),
datetime2: datetime({ mode: 'string' }).notNull(),
decimal1: decimal().notNull(),
decimal2: decimal({ unsigned: true }).notNull(),
double1: double().notNull(),
double2: double({ unsigned: true }).notNull(),
float1: float().notNull(),
float2: float({ unsigned: true }).notNull(),
int1: int().notNull(),
int2: int({ unsigned: true }).notNull(),
json: json().notNull(),
mediumint1: mediumint().notNull(),
mediumint2: mediumint({ unsigned: true }).notNull(),
enum: mysqlEnum('enum', ['a', 'b', 'c']).notNull(),
real: real().notNull(),
serial: serial().notNull(),
smallint1: smallint().notNull(),
smallint2: smallint({ unsigned: true }).notNull(),
text1: text().notNull(),
text2: text({ enum: ['a', 'b', 'c'] }).notNull(),
time: time().notNull(),
timestamp1: timestamp({ mode: 'date' }).notNull(),
timestamp2: timestamp({ mode: 'string' }).notNull(),
tinyint1: tinyint().notNull(),
tinyint2: tinyint({ unsigned: true }).notNull(),
varchar1: varchar({ length: 10 }).notNull(),
varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(),
varbinary: varbinary({ length: 10 }).notNull(),
year: year().notNull(),
longtext1: longtext().notNull(),
longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(),
mediumtext1: mediumtext().notNull(),
mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(),
tinytext1: tinytext().notNull(),
tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(),
}));
const result = createSelectSchema(table);
const expected = type({
bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER),
bigint2: type.bigint.narrow(bigintNarrow),
bigint3: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER),
bigint4: type.bigint.narrow(unsignedBigintNarrow),
binary: type.string,
boolean: type.boolean,
char1: type.string.exactlyLength(10),
char2: type.enumerated('a', 'b', 'c'),
date1: type.Date,
date2: type.string,
datetime1: type.Date,
datetime2: type.string,
decimal1: type.string,
decimal2: type.string,
double1: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX),
double2: type.number.atLeast(0).atMost(CONSTANTS.INT48_UNSIGNED_MAX),
float1: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX),
float2: type.number.atLeast(0).atMost(CONSTANTS.INT24_UNSIGNED_MAX),
int1: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX),
int2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT32_UNSIGNED_MAX),
json: jsonSchema,
mediumint1: type.keywords.number.integer.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX),
mediumint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT24_UNSIGNED_MAX),
enum: type.enumerated('a', 'b', 'c'),
real: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX),
serial: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER),
smallint1: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX),
smallint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT16_UNSIGNED_MAX),
text1: type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX),
text2: type.enumerated('a', 'b', 'c'),
time: type.string,
timestamp1: type.Date,
timestamp2: type.string,
tinyint1: type.keywords.number.integer.atLeast(CONSTANTS.INT8_MIN).atMost(CONSTANTS.INT8_MAX),
tinyint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT8_UNSIGNED_MAX),
varchar1: type.string.atMostLength(10),
varchar2: type.enumerated('a', 'b', 'c'),
varbinary: type.string,
year: type.keywords.number.integer.atLeast(1901).atMost(2155),
longtext1: type.string.atMostLength(CONSTANTS.INT32_UNSIGNED_MAX),
longtext2: type.enumerated('a', 'b', 'c'),
mediumtext1: type.string.atMostLength(CONSTANTS.INT24_UNSIGNED_MAX),
mediumtext2: type.enumerated('a', 'b', 'c'),
tinytext1: type.string.atMostLength(CONSTANTS.INT8_UNSIGNED_MAX),
tinytext2: type.enumerated('a', 'b', 'c'),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Compile-time-only checks below: block scopes with @ts-expect-error verify
// the type-level API; nothing here runs as a vitest test.
// NOTE(review): generic arguments appear stripped by extraction
// (e.g. `json().$type()` was presumably `$type<TopLevelCondition>()`) —
// verify against upstream.
/* Infinitely recursive type */ {
const TopLevelCondition: Type = type('unknown.any') as any;
const table = mysqlTable('test', {
json: json().$type(),
});
const result = createSelectSchema(table);
const expected = type({
json: TopLevelCondition.or(type.null),
});
Expect, type.infer>>();
}
/* Disallow unknown keys in table refinement - select */ {
const table = mysqlTable('test', { id: int() });
// @ts-expect-error
createSelectSchema(table, { unknown: type.string });
}
/* Disallow unknown keys in table refinement - insert */ {
const table = mysqlTable('test', { id: int() });
// @ts-expect-error
createInsertSchema(table, { unknown: type.string });
}
/* Disallow unknown keys in table refinement - update */ {
const table = mysqlTable('test', { id: int() });
// @ts-expect-error
createUpdateSchema(table, { unknown: type.string });
}
/* Disallow unknown keys in view qb - select */ {
const table = mysqlTable('test', { id: int() });
const view = mysqlView('test').as((qb) => qb.select().from(table));
const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table));
// @ts-expect-error
createSelectSchema(view, { unknown: type.string });
// @ts-expect-error
createSelectSchema(nestedSelect, { table: { unknown: type.string } });
}
/* Disallow unknown keys in view columns - select */ {
const view = mysqlView('test', { id: int() }).as(sql``);
// @ts-expect-error
createSelectSchema(view, { unknown: type.string });
}
================================================
FILE: drizzle-arktype/tests/pg.test.ts
================================================
import { Type, type } from 'arktype';
import { type Equal, sql } from 'drizzle-orm';
import {
customType,
integer,
json,
jsonb,
pgEnum,
pgMaterializedView,
pgSchema,
pgTable,
pgView,
serial,
text,
} from 'drizzle-orm/pg-core';
import type { TopLevelCondition } from 'json-rules-engine';
import { test } from 'vitest';
import { bigintNarrow, jsonSchema } from '~/column.ts';
import { CONSTANTS } from '~/constants.ts';
import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src';
import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts';
// Baseline arktype schemas for drizzle's Postgres column mappings:
// integer → 32-bit integer; text is unbounded (unlike MySQL TEXT).
const integerSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX);
const textSchema = type.string;
// Select schema of a plain table: all columns required.
test('table - select', (t) => {
const table = pgTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const result = createSelectSchema(table);
const expected = type({ id: integerSchema, name: textSchema });
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Tables inside a pgSchema behave identically to top-level tables.
test('table in schema - select', (tc) => {
const schema = pgSchema('test');
const table = schema.table('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const result = createSelectSchema(table);
const expected = type({ id: integerSchema, name: textSchema });
expectSchemaShape(tc, expected).from(result);
Expect>();
});
// Insert: always-generated identity id is excluded outright.
test('table - insert', (t) => {
const table = pgTable('test', {
id: integer().generatedAlwaysAsIdentity().primaryKey(),
name: text().notNull(),
age: integer(),
});
const result = createInsertSchema(table);
const expected = type({ name: textSchema, age: integerSchema.or(type.null).optional() });
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Update: remaining columns optional; identity id still excluded.
test('table - update', (t) => {
const table = pgTable('test', {
id: integer().generatedAlwaysAsIdentity().primaryKey(),
name: text().notNull(),
age: integer(),
});
const result = createUpdateSchema(table);
const expected = type({
name: textSchema.optional(),
age: integerSchema.or(type.null).optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Query-builder view: raw SQL fields map to unknown.
test('view qb - select', (t) => {
const table = pgTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table));
const result = createSelectSchema(view);
const expected = type({ id: integerSchema, age: type('unknown.any') });
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('view columns - select', (t) => {
const view = pgView('test', {
id: serial().primaryKey(),
name: text().notNull(),
}).as(sql``);
const result = createSelectSchema(view);
const expected = type({ id: integerSchema, name: textSchema });
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Materialized views built via the query builder mirror plain views.
test('materialized view qb - select', (t) => {
const table = pgTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table));
const result = createSelectSchema(view);
const expected = type({ id: integerSchema, age: type('unknown.any') });
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Fix: this test is named "materialized view columns" but previously built a
// plain pgView, so the materialized-view column path was never exercised.
// Construct the entity with pgMaterializedView (already imported) instead.
test('materialized view columns - select', (t) => {
	const view = pgMaterializedView('test', {
		id: serial().primaryKey(),
		name: text().notNull(),
	}).as(sql``);
	const result = createSelectSchema(view);
	const expected = type({ id: integerSchema, name: textSchema });
	expectSchemaShape(t, expected).from(result);
	Expect>();
});
// Nested selections and embedded tables become nested object schemas.
test('view with nested fields - select', (t) => {
const table = pgTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const view = pgMaterializedView('test').as((qb) =>
qb.select({
id: table.id,
nested: {
name: table.name,
age: sql``.as('age'),
},
table,
}).from(table)
);
const result = createSelectSchema(view);
const expected = type({
id: integerSchema,
nested: { name: textSchema, age: type('unknown.any') },
table: { id: integerSchema, name: textSchema },
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// pg enums map to an arktype enumerated type.
test('enum - select', (t) => {
const enum_ = pgEnum('test', ['a', 'b', 'c']);
const result = createSelectSchema(enum_);
const expected = type.enumerated('a', 'b', 'c');
expectEnumValues(t, expected).from(result);
Expect>();
});
// Select: only NOT NULL decides nullability; defaults are irrelevant.
test('nullability - select', (t) => {
const table = pgTable('test', {
c1: integer(),
c2: integer().notNull(),
c3: integer().default(1),
c4: integer().notNull().default(1),
});
const result = createSelectSchema(table);
const expected = type({
c1: integerSchema.or(type.null),
c2: integerSchema,
c3: integerSchema.or(type.null),
c4: integerSchema,
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Insert: generated-always (c5) and always-identity (c6) columns are excluded;
// by-default identity (c7) stays but is optional.
test('nullability - insert', (t) => {
const table = pgTable('test', {
c1: integer(),
c2: integer().notNull(),
c3: integer().default(1),
c4: integer().notNull().default(1),
c5: integer().generatedAlwaysAs(1),
c6: integer().generatedAlwaysAsIdentity(),
c7: integer().generatedByDefaultAsIdentity(),
});
const result = createInsertSchema(table);
const expected = type({
c1: integerSchema.or(type.null).optional(),
c2: integerSchema,
c3: integerSchema.or(type.null).optional(),
c4: integerSchema.optional(),
c7: integerSchema.optional(),
});
expectSchemaShape(t, expected).from(result);
});
// Update: same exclusions as insert, all remaining columns optional.
test('nullability - update', (t) => {
const table = pgTable('test', {
c1: integer(),
c2: integer().notNull(),
c3: integer().default(1),
c4: integer().notNull().default(1),
c5: integer().generatedAlwaysAs(1),
c6: integer().generatedAlwaysAsIdentity(),
c7: integer().generatedByDefaultAsIdentity(),
});
const result = createUpdateSchema(table);
const expected = type({
c1: integerSchema.or(type.null).optional(),
c2: integerSchema.optional(),
c3: integerSchema.or(type.null).optional(),
c4: integerSchema.optional(),
c7: integerSchema.optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Refinements: a callback tightens the generated schema; a plain schema
// replaces it wholesale.
test('refine table - select', (t) => {
const table = pgTable('test', {
c1: integer(),
c2: integer().notNull(),
c3: integer().notNull(),
});
const result = createSelectSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: integerSchema.or(type.null),
c2: integerSchema.atMost(1000),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Custom data types have no generated schema, so a refinement must supply one.
test('refine table - select with custom data type', (t) => {
const customText = customType({ dataType: () => 'text' });
const table = pgTable('test', {
c1: integer(),
c2: integer().notNull(),
c3: integer().notNull(),
c4: customText(),
});
const customTextSchema = type.string.atLeastLength(1).atMostLength(100);
const result = createSelectSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
c4: customTextSchema,
});
const expected = type({
c1: integerSchema.or(type.null),
c2: integerSchema.atMost(1000),
c3: type.string.pipe(Number),
c4: customTextSchema,
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('refine table - insert', (t) => {
const table = pgTable('test', {
c1: integer(),
c2: integer().notNull(),
c3: integer().notNull(),
c4: integer().generatedAlwaysAs(1),
});
const result = createInsertSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: integerSchema.or(type.null).optional(),
c2: integerSchema.atMost(1000),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Callback refinements gain `.optional()` in update schemas; plain-schema
// refinements (c3) are used exactly as provided.
test('refine table - update', (t) => {
const table = pgTable('test', {
c1: integer(),
c2: integer().notNull(),
c3: integer().notNull(),
c4: integer().generatedAlwaysAs(1),
});
const result = createUpdateSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: integerSchema.or(type.null).optional(),
c2: integerSchema.atMost(1000).optional(),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Refinements recurse into nested selections and embedded tables of a view.
test('refine view - select', (t) => {
const table = pgTable('test', {
c1: integer(),
c2: integer(),
c3: integer(),
c4: integer(),
c5: integer(),
c6: integer(),
});
const view = pgView('test').as((qb) =>
qb.select({
c1: table.c1,
c2: table.c2,
c3: table.c3,
nested: {
c4: table.c4,
c5: table.c5,
c6: table.c6,
},
table,
}).from(table)
);
const result = createSelectSchema(view, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
nested: {
c5: (schema) => schema.atMost(1000),
c6: type.string.pipe(Number),
},
table: {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
},
});
const expected = type({
c1: integerSchema.or(type.null),
c2: integerSchema.atMost(1000).or(type.null),
c3: type.string.pipe(Number),
nested: type({
c4: integerSchema.or(type.null),
c5: integerSchema.atMost(1000).or(type.null),
c6: type.string.pipe(Number),
}),
table: type({
c1: integerSchema.or(type.null),
c2: integerSchema.atMost(1000).or(type.null),
c3: type.string.pipe(Number),
c4: integerSchema.or(type.null),
c5: integerSchema.or(type.null),
c6: integerSchema.or(type.null),
}),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Exhaustive mapping check: every Postgres column type produces the expected
// arktype schema (ranges from CONSTANTS, regex-described bit/uuid types,
// geometry/point/line shapes, pgvector types, nested arrays, etc.).
test('all data types', (t) => {
const table = pgTable('test', ({
bigint,
bigserial,
bit,
boolean,
date,
char,
cidr,
doublePrecision,
geometry,
halfvec,
inet,
integer,
interval,
json,
jsonb,
line,
macaddr,
macaddr8,
numeric,
point,
real,
serial,
smallint,
smallserial,
text,
sparsevec,
time,
timestamp,
uuid,
varchar,
vector,
}) => ({
bigint1: bigint({ mode: 'number' }).notNull(),
bigint2: bigint({ mode: 'bigint' }).notNull(),
bigserial1: bigserial({ mode: 'number' }).notNull(),
bigserial2: bigserial({ mode: 'bigint' }).notNull(),
bit: bit({ dimensions: 5 }).notNull(),
boolean: boolean().notNull(),
date1: date({ mode: 'date' }).notNull(),
date2: date({ mode: 'string' }).notNull(),
char1: char({ length: 10 }).notNull(),
char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(),
cidr: cidr().notNull(),
doublePrecision: doublePrecision().notNull(),
geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(),
geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(),
halfvec: halfvec({ dimensions: 3 }).notNull(),
inet: inet().notNull(),
integer: integer().notNull(),
interval: interval().notNull(),
json: json().notNull(),
jsonb: jsonb().notNull(),
line1: line({ mode: 'abc' }).notNull(),
line2: line({ mode: 'tuple' }).notNull(),
macaddr: macaddr().notNull(),
macaddr8: macaddr8().notNull(),
numeric: numeric().notNull(),
point1: point({ mode: 'xy' }).notNull(),
point2: point({ mode: 'tuple' }).notNull(),
real: real().notNull(),
serial: serial().notNull(),
smallint: smallint().notNull(),
smallserial: smallserial().notNull(),
text1: text().notNull(),
text2: text({ enum: ['a', 'b', 'c'] }).notNull(),
sparsevec: sparsevec({ dimensions: 3 }).notNull(),
time: time().notNull(),
timestamp1: timestamp({ mode: 'date' }).notNull(),
timestamp2: timestamp({ mode: 'string' }).notNull(),
uuid: uuid().notNull(),
varchar1: varchar({ length: 10 }).notNull(),
varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(),
vector: vector({ dimensions: 3 }).notNull(),
array1: integer().array().notNull(),
array2: integer().array().array(2).notNull(),
array3: varchar({ length: 10 }).array().array(2).notNull(),
}));
const result = createSelectSchema(table);
const expected = type({
bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER),
bigint2: type.bigint.narrow(bigintNarrow),
bigserial1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER),
bigserial2: type.bigint.narrow(bigintNarrow),
bit: type(/^[01]{5}$/).describe('a string containing ones or zeros while being 5 characters long'),
boolean: type.boolean,
date1: type.Date,
date2: type.string,
char1: type.string.exactlyLength(10),
char2: type.enumerated('a', 'b', 'c'),
cidr: type.string,
doublePrecision: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX),
geometry1: type([type.number, type.number]),
geometry2: type({ x: type.number, y: type.number }),
halfvec: type.number.array().exactlyLength(3),
inet: type.string,
integer: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX),
interval: type.string,
json: jsonSchema,
jsonb: jsonSchema,
line1: type({ a: type.number, b: type.number, c: type.number }),
line2: type([type.number, type.number, type.number]),
macaddr: type.string,
macaddr8: type.string,
numeric: type.string,
point1: type({ x: type.number, y: type.number }),
point2: type([type.number, type.number]),
real: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX),
serial: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX),
smallint: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX),
smallserial: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX),
text1: type.string,
text2: type.enumerated('a', 'b', 'c'),
sparsevec: type.string,
time: type.string,
timestamp1: type.Date,
timestamp2: type.string,
uuid: type(/^[\da-f]{8}(?:-[\da-f]{4}){3}-[\da-f]{12}$/iu).describe('a RFC-4122-compliant UUID'),
varchar1: type.string.atMostLength(10),
varchar2: type.enumerated('a', 'b', 'c'),
vector: type.number.array().exactlyLength(3),
array1: integerSchema.array(),
array2: integerSchema.array().array().exactlyLength(2),
array3: type.string.atMostLength(10).array().array().exactlyLength(2),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// Compile-time-only checks: block scopes with @ts-expect-error verify the
// type-level API; nothing below runs as a vitest test.
// NOTE(review): generic arguments appear stripped by extraction
// (e.g. `json().$type()` was presumably `$type<TopLevelCondition>()`) —
// verify against upstream.
/* Infinitely recursive type */ {
const TopLevelCondition: Type = type('unknown.any') as any;
const table = pgTable('test', {
json: json().$type().notNull(),
jsonb: jsonb().$type(),
});
const result = createSelectSchema(table);
const expected = type({
json: TopLevelCondition,
jsonb: TopLevelCondition.or(type.null),
});
Expect, type.infer>>();
}
/* Disallow unknown keys in table refinement - select */ {
const table = pgTable('test', { id: integer() });
// @ts-expect-error
createSelectSchema(table, { unknown: type.string });
}
/* Disallow unknown keys in table refinement - insert */ {
const table = pgTable('test', { id: integer() });
// @ts-expect-error
createInsertSchema(table, { unknown: type.string });
}
/* Disallow unknown keys in table refinement - update */ {
const table = pgTable('test', { id: integer() });
// @ts-expect-error
createUpdateSchema(table, { unknown: type.string });
}
/* Disallow unknown keys in view qb - select */ {
const table = pgTable('test', { id: integer() });
const view = pgView('test').as((qb) => qb.select().from(table));
const mView = pgMaterializedView('test').as((qb) => qb.select().from(table));
const nestedSelect = pgView('test').as((qb) => qb.select({ table }).from(table));
// @ts-expect-error
createSelectSchema(view, { unknown: type.string });
// @ts-expect-error
createSelectSchema(mView, { unknown: type.string });
// @ts-expect-error
createSelectSchema(nestedSelect, { table: { unknown: type.string } });
}
/* Disallow unknown keys in view columns - select */ {
const view = pgView('test', { id: integer() }).as(sql``);
const mView = pgView('test', { id: integer() }).as(sql``);
// @ts-expect-error
createSelectSchema(view, { unknown: type.string });
// @ts-expect-error
createSelectSchema(mView, { unknown: type.string });
}
================================================
FILE: drizzle-arktype/tests/singlestore.test.ts
================================================
import { Type, type } from 'arktype';
import { type Equal } from 'drizzle-orm';
import { customType, int, json, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core';
import type { TopLevelCondition } from 'json-rules-engine';
import { test } from 'vitest';
import { bigintNarrow, jsonSchema, unsignedBigintNarrow } from '~/column.ts';
import { CONSTANTS } from '~/constants.ts';
import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src';
import { Expect, expectSchemaShape } from './utils.ts';
// Expected arktype schema for `int()` columns: integers within signed 32-bit range.
const intSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX);
// Expected schema for `serial()` in number mode: non-negative integers up to MAX_SAFE_INTEGER.
const serialNumberModeSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER);
// Expected schema for `text()` columns: strings capped at INT16_UNSIGNED_MAX characters.
const textSchema = type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX);
test('table - select', (t) => {
const table = singlestoreTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const result = createSelectSchema(table);
const expected = type({ id: serialNumberModeSchema, name: textSchema });
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('table in schema - select', (tc) => {
const schema = singlestoreSchema('test');
const table = schema.table('test', {
id: serial().primaryKey(),
name: text().notNull(),
});
const result = createSelectSchema(table);
const expected = type({ id: serialNumberModeSchema, name: textSchema });
expectSchemaShape(tc, expected).from(result);
Expect>();
});
test('table - insert', (t) => {
const table = singlestoreTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
age: int(),
});
const result = createInsertSchema(table);
const expected = type({
id: serialNumberModeSchema.optional(),
name: textSchema,
age: intSchema.or(type.null).optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('table - update', (t) => {
const table = singlestoreTable('test', {
id: serial().primaryKey(),
name: text().notNull(),
age: int(),
});
const result = createUpdateSchema(table);
const expected = type({
id: serialNumberModeSchema.optional(),
name: textSchema.optional(),
age: intSchema.or(type.null).optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
// TODO: SingleStore doesn't support views yet. Add these tests when they're added
// test('view qb - select', (t) => {
// const table = singlestoreTable('test', {
// id: serial().primaryKey(),
// name: text().notNull(),
// });
// const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table));
// const result = createSelectSchema(view);
// const expected = v.object({ id: serialNumberModeSchema, age: v.any() });
// expectSchemaShape(t, expected).from(result);
// Expect>();
// });
// test('view columns - select', (t) => {
// const view = mysqlView('test', {
// id: serial().primaryKey(),
// name: text().notNull(),
// }).as(sql``);
// const result = createSelectSchema(view);
// const expected = v.object({ id: serialNumberModeSchema, name: textSchema });
// expectSchemaShape(t, expected).from(result);
// Expect>();
// });
// test('view with nested fields - select', (t) => {
// const table = singlestoreTable('test', {
// id: serial().primaryKey(),
// name: text().notNull(),
// });
// const view = mysqlView('test').as((qb) =>
// qb.select({
// id: table.id,
// nested: {
// name: table.name,
// age: sql``.as('age'),
// },
// table,
// }).from(table)
// );
// const result = createSelectSchema(view);
// const expected = v.object({
// id: serialNumberModeSchema,
// nested: v.object({ name: textSchema, age: v.any() }),
// table: v.object({ id: serialNumberModeSchema, name: textSchema }),
// });
// expectSchemaShape(t, expected).from(result);
// Expect>();
// });
test('nullability - select', (t) => {
const table = singlestoreTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().default(1),
c4: int().notNull().default(1),
});
const result = createSelectSchema(table);
const expected = type({
c1: intSchema.or(type.null),
c2: intSchema,
c3: intSchema.or(type.null),
c4: intSchema,
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('nullability - insert', (t) => {
const table = singlestoreTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().default(1),
c4: int().notNull().default(1),
c5: int().generatedAlwaysAs(1),
});
const result = createInsertSchema(table);
const expected = type({
c1: intSchema.or(type.null).optional(),
c2: intSchema,
c3: intSchema.or(type.null).optional(),
c4: intSchema.optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('nullability - update', (t) => {
const table = singlestoreTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().default(1),
c4: int().notNull().default(1),
c5: int().generatedAlwaysAs(1),
});
const result = createUpdateSchema(table);
const expected = type({
c1: intSchema.or(type.null).optional(),
c2: intSchema.optional(),
c3: intSchema.or(type.null).optional(),
c4: intSchema.optional(),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('refine table - select', (t) => {
const table = singlestoreTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().notNull(),
});
const result = createSelectSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: intSchema.or(type.null),
c2: intSchema.atMost(1000),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('refine table - select with custom data type', (t) => {
const customText = customType({ dataType: () => 'text' });
const table = singlestoreTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().notNull(),
c4: customText(),
});
const customTextSchema = type.string.atLeastLength(1).atMostLength(100);
const result = createSelectSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
c4: customTextSchema,
});
const expected = type({
c1: intSchema.or(type.null),
c2: intSchema.atMost(1000),
c3: type.string.pipe(Number),
c4: customTextSchema,
});
expectSchemaShape(t, expected).from(result);
Expect>();
});
test('refine table - insert', (t) => {
const table = singlestoreTable('test', {
c1: int(),
c2: int().notNull(),
c3: int().notNull(),
c4: int().generatedAlwaysAs(1),
});
const result = createInsertSchema(table, {
c2: (schema) => schema.atMost(1000),
c3: type.string.pipe(Number),
});
const expected = type({
c1: intSchema.or(type.null).optional(),
c2: intSchema.atMost(1000),
c3: type.string.pipe(Number),
});
expectSchemaShape(t, expected).from(result);
Expect